private void CreateAuxiliaryMats() {
    if (mRgba.empty()) return;

    mSizeRgba = mRgba.size();

    int rows = (int) mSizeRgba.height;
    int cols = (int) mSizeRgba.width;

    int left = cols / 8;
    int top = rows / 8;

    int width = cols * 3 / 4;
    int height = rows * 3 / 4;

    if (mRgbaInnerWindow == null)
      mRgbaInnerWindow = mRgba.submat(top, top + height, left, left + width);
    mSizeRgbaInner = mRgbaInnerWindow.size();

    if (mGrayInnerWindow == null && !mGray.empty())
      mGrayInnerWindow = mGray.submat(top, top + height, left, left + width);

    if (mBlurWindow == null) mBlurWindow = mRgba.submat(0, rows, cols / 3, cols * 2 / 3);

    if (mZoomCorner == null)
      mZoomCorner = mRgba.submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);

    if (mZoomWindow == null)
      mZoomWindow =
          mRgba.submat(
              rows / 2 - 9 * rows / 100,
              rows / 2 + 9 * rows / 100,
              cols / 2 - 9 * cols / 100,
              cols / 2 + 9 * cols / 100);
  }
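submat returns a header that shares pixel storage with the parent Mat, which is why these auxiliary windows only need to be created once: anything later drawn or resized into mRgbaInnerWindow, mZoomWindow, etc. lands directly in mRgba. A minimal standalone sketch of that behaviour (plain desktop Java, assuming the OpenCV Java bindings and native library are available):

import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Scalar;

public class SubmatViewDemo {
  public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // 8x8 single-channel frame, all zeros.
    Mat frame = Mat.zeros(8, 8, CvType.CV_8UC1);

    // submat(rowStart, rowEnd, colStart, colEnd) returns a view that shares the
    // pixel buffer of 'frame'; no pixels are copied.
    Mat inner = frame.submat(2, 6, 2, 6);
    inner.setTo(new Scalar(255));

    // The write through 'inner' is visible in 'frame' as well.
    System.out.println(frame.dump());
  }
}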
Example #2
  private void CreateAuxiliaryMats() {
    if (mGray.empty()) return;

    int rows = mGray.rows();
    int cols = mGray.cols();

    if (mZoomWindow == null) {
      mZoomWindow = mRgba.submat(rows / 2 + rows / 10, rows, cols / 2 + cols / 10, cols);
      mZoomWindow2 = mRgba.submat(0, rows / 2 - rows / 10, cols / 2 + cols / 10, cols);
    }
  }
  private ArrayList<Double> applyThresholdOnImage(Mat srcImgMat, Mat outputImgMat) {
    double localThreshold;
    int startRow;
    int endRow;
    int startCol;
    int endCol;

    ArrayList<Double> localThresholds = new ArrayList<Double>();

    int numberOfTiles = mPreference.getNumberOfTiles();
    int tileHeight = (int) srcImgMat.size().height / numberOfTiles;
    int tileWidth = (int) srcImgMat.size().width / numberOfTiles;

    // Split the image into a numberOfTiles x numberOfTiles grid and threshold each tile
    // separately; the last row/column of tiles absorbs any remainder pixels.
    for (int tileRowCount = 0; tileRowCount < numberOfTiles; tileRowCount++) {
      startRow = tileRowCount * tileHeight;
      if (tileRowCount < numberOfTiles - 1) endRow = (tileRowCount + 1) * tileHeight;
      else endRow = (int) srcImgMat.size().height;

      for (int tileColCount = 0; tileColCount < numberOfTiles; tileColCount++) {
        startCol = tileColCount * tileWidth;
        if (tileColCount < numberOfTiles - 1) endCol = (tileColCount + 1) * tileWidth;
        else endCol = (int) srcImgMat.size().width;

        Mat tileThreshold = new Mat();
        Mat tileMat = srcImgMat.submat(startRow, endRow, startCol, endCol);
        // Previously: localThreshold = Imgproc.threshold(tileMat, tileThreshold, 0, 255,
        //     Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
        // Adaptive thresholding computes a per-pixel threshold, so the value recorded for
        // each tile is just a fixed placeholder (0x80), not a measured threshold.
        localThreshold = 0x80;
        Imgproc.adaptiveThreshold(
            tileMat,
            tileThreshold,
            255,
            Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY,
            91,
            2);
        Mat copyMat = outputImgMat.submat(startRow, endRow, startCol, endCol);
        tileThreshold.copyTo(copyMat);
        tileThreshold.release();
        localThresholds.add(localThreshold);
      }
    }

    return localThresholds;
  }
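Each tile above goes through the same Imgproc.adaptiveThreshold call. A small standalone sketch of that call on a whole image, assuming the 2.4-era Highgui I/O API used by some of the other snippets here (in OpenCV 3.x+ the same imread/imwrite calls live in Imgcodecs) and illustrative file names:

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.highgui.Highgui;
import org.opencv.imgproc.Imgproc;

public class AdaptiveThresholdDemo {
  public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // adaptiveThreshold expects an 8-bit single-channel source.
    Mat gray = Highgui.imread("marker.png", Highgui.CV_LOAD_IMAGE_GRAYSCALE);
    Mat binary = new Mat();

    // Same parameters as each tile above: a Gaussian-weighted 91x91 neighbourhood,
    // with the constant 2 subtracted from the local mean before the comparison.
    Imgproc.adaptiveThreshold(
        gray, binary, 255, Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C, Imgproc.THRESH_BINARY, 91, 2);

    Highgui.imwrite("marker_binary.png", binary);
  }
}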
 private void displayMarkerImage(Mat srcImgMat, Mat destImageMat) {
   // find location of image segment to be replaced in the destination image.
   Rect rect = calculateImageSegmentArea(destImageMat);
   Mat destSubmat = destImageMat.submat(rect.y, rect.y + rect.height, rect.x, rect.x + rect.width);
   // copy image.
   srcImgMat.copyTo(destSubmat);
 }
Example #5
 public static Mat findCardNumber(String path) {
   Mat mat = Highgui.imread(path);
   // Crop a full-width horizontal band: the card-number line sits roughly 30/54 of the
   // way down the card and spans about 7/54 of its height.
   int x = 0;
   int y = (int) (mat.height() * ((double) 30 / 54));
   int width = mat.cols();
   int height = (int) (mat.height() * ((double) 7 / 54));
   return mat.submat(new Rect(x, y, width, height));
 }
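A possible caller for findCardNumber, placed in the same class; the file names are illustrative and imwrite comes from the same 2.4-era Highgui class as imread:

  public static void main(String[] args) {
    // Load the OpenCV native library once before any Mat is created.
    System.loadLibrary(org.opencv.core.Core.NATIVE_LIBRARY_NAME);
    Mat numberStrip = findCardNumber("card.jpg");
    Highgui.imwrite("card_number_strip.jpg", numberStrip);
  }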
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();

    if (mIsColorSelected) {
      // process() in this modified detector returns a numeric measure of the detected
      // blob; the UI runnable registered in onTouch compares it against fixed thresholds.
      abc = mDetector.process(mRgba);
      // hehe.setText(abc.toString());
      List<MatOfPoint> contours = mDetector.getContours();
      Log.e(TAG, "Contours count: " + contours.size());
      Imgproc.drawContours(mRgba, contours, -1, CONTOUR_COLOR);

      Mat colorLabel = mRgba.submat(4, 68, 4, 68);
      colorLabel.setTo(mBlobColorRgba);

      Mat spectrumLabel = mRgba.submat(4, 4 + mSpectrum.rows(), 70, 70 + mSpectrum.cols());
      mSpectrum.copyTo(spectrumLabel);
    }

    return mRgba;
  }
 private void copyThresholdedImageToRgbImgMat(Mat thresholdedImgMat, Mat dest) {
   // convert thresholded image segment to RGB.
   Mat smallRegionImg = new Mat();
   Imgproc.cvtColor(thresholdedImgMat, smallRegionImg, Imgproc.COLOR_GRAY2BGRA, 4);
   // find location of image segment to be replaced in the destination image.
   Rect rect = calculateImageSegmentArea(dest);
   Mat destSubmat = dest.submat(rect.y, rect.y + rect.height, rect.x, rect.x + rect.width);
   // copy image.
   smallRegionImg.copyTo(destSubmat);
   smallRegionImg.release();
 }
Example #8
  @Override
  public void onPreviewFrame(byte[] data, Camera camera) {
    if (mDetecting) return;

    mDetecting = true;

    mFrameBuffer.put(0, 0, data);
    Message msg = Message.obtain();
    // submat takes (rowStart, rowEnd, colStart, colEnd): rows run over the frame height
    // and columns over the frame width, so the luma plane of the preview buffer is
    // submat(0, mFrameHeight, 0, mFrameWidth).
    msg.obj = mFrameBuffer.submat(0, mFrameHeight, 0, mFrameWidth);
    mHandler.sendMessage(msg);
  }
Example #9
  private Mat get_template(CascadeClassifier classifier, Rect area, int size) {
    Mat template = new Mat();
    Mat mROI = mGray.submat(area);
    MatOfRect eyes = new MatOfRect();
    Point iris = new Point();
    Rect eye_template = new Rect();
    classifier.detectMultiScale(
        mROI,
        eyes,
        1.15,
        2,
        Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
        new Size(30, 30),
        new Size());

    Rect[] eyesArray = eyes.toArray();
    // CASCADE_FIND_BIGGEST_OBJECT yields at most one detection; the method returns the
    // template built from the first eye found.
    for (int i = 0; i < eyesArray.length; i++) {
      Rect e = eyesArray[i];
      e.x = area.x + e.x;
      e.y = area.y + e.y;
      Rect eye_only_rectangle =
          new Rect(
              (int) e.tl().x,
              (int) (e.tl().y + e.height * 0.4),
              (int) e.width,
              (int) (e.height * 0.6));
      mROI = mGray.submat(eye_only_rectangle);
      Mat vyrez = mRgba.submat(eye_only_rectangle);

      Core.MinMaxLocResult mmG = Core.minMaxLoc(mROI);

      Imgproc.circle(vyrez, mmG.minLoc, 2, new Scalar(255, 255, 255, 255), 2);
      iris.x = mmG.minLoc.x + eye_only_rectangle.x;
      iris.y = mmG.minLoc.y + eye_only_rectangle.y;
      eye_template = new Rect((int) iris.x - size / 2, (int) iris.y - size / 2, size, size);
      Imgproc.rectangle(mRgba, eye_template.tl(), eye_template.br(), new Scalar(255, 0, 0, 255), 2);
      template = (mGray.submat(eye_template)).clone();
      return template;
    }
    return template;
  }
Example #10
 private Mat buildROI(Mat src) {
   int offset = mZoomImg.getWidth();
   int rows = src.rows();
   int cols = src.cols();
   Mat sub =
       src.submat(
           rows / 2 - offset / 2,
           rows / 2 + offset / 2,
           cols / 2 - offset / 2,
           cols / 2 + offset / 2);
   return sub;
 }
Example #11
  private double match_eye(Rect area, Mat mTemplate, int type) {
    Point matchLoc;
    Mat mROI = mGray.submat(area);
    // Size the result against the ROI that is actually searched: (W - w + 1) columns by
    // (H - h + 1) rows.
    int result_cols = mROI.cols() - mTemplate.cols() + 1;
    int result_rows = mROI.rows() - mTemplate.rows() + 1;
    if (mTemplate.cols() == 0 || mTemplate.rows() == 0) {
      return 0.0;
    }
    mResult = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    switch (type) {
      case TM_SQDIFF:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_SQDIFF);
        break;
      case TM_SQDIFF_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_SQDIFF_NORMED);
        break;
      case TM_CCOEFF:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCOEFF);
        break;
      case TM_CCOEFF_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCOEFF_NORMED);
        break;
      case TM_CCORR:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCORR);
        break;
      case TM_CCORR_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCORR_NORMED);
        break;
    }

    Core.MinMaxLocResult mmres = Core.minMaxLoc(mResult);

    if (type == TM_SQDIFF || type == TM_SQDIFF_NORMED) {
      matchLoc = mmres.minLoc;
    } else {
      matchLoc = mmres.maxLoc;
    }

    Point matchLoc_tx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
    Point matchLoc_ty =
        new Point(matchLoc.x + mTemplate.cols() + area.x, matchLoc.y + mTemplate.rows() + area.y);

    Core.rectangle(mRgba, matchLoc_tx, matchLoc_ty, new Scalar(255, 255, 0, 255));

    // Return the best-match score: for the squared-difference methods the best match is
    // the minimum value, for the correlation methods it is the maximum.
    if (type == TM_SQDIFF || type == TM_SQDIFF_NORMED) {
      return mmres.minVal;
    } else {
      return mmres.maxVal;
    }
  }
Example #12
  // OpenCV code
  private void modifyImage(String fileName) {
    // Create a face detector from the cascade file
    CascadeClassifier faceDetector = new CascadeClassifier("haarcascade_frontalface_alt.xml");
    Mat image = Highgui.imread(fileName);

    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Blur each face
    for (Rect rect : faceDetections.toArray()) {
      Mat faceArea = image.submat(rect);
      Imgproc.blur(faceArea, faceArea, new Size(30, 30));
    }
    // Save the modified image
    Highgui.imwrite("edited_" + fileName, image);
  }
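One easy-to-miss prerequisite for the snippet above: the Java bindings wrap native code, and CascadeClassifier does not throw when the cascade file is missing. A short setup sketch (class and method names are illustrative, not part of the original code):

import org.opencv.core.Core;
import org.opencv.objdetect.CascadeClassifier;

public class DetectorSetup {
  static {
    // Must run once per process before any Mat or CascadeClassifier is created.
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
  }

  static CascadeClassifier loadFaceCascade() {
    CascadeClassifier faceDetector = new CascadeClassifier("haarcascade_frontalface_alt.xml");
    // The constructor fails silently on a missing or corrupt file; empty() is the only signal.
    if (faceDetector.empty()) {
      throw new IllegalStateException("haarcascade_frontalface_alt.xml not found or failed to load");
    }
    return faceDetector;
  }
}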
Example #13
  @Override
  protected Bitmap processFrame(VideoCapture capture) {
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
      mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
      if (mJavaDetector != null)
        mJavaDetector.detectMultiScale(
            mGray,
            faces,
            1.1,
            2,
            2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
            new Size());

      if (mZoomCorner == null || mZoomWindow == null) CreateAuxiliaryMats();

      Rect[] facesArray = faces.toArray();

      for (int i = 0; i < facesArray.length; i++) {
        Rect r = facesArray[i];
        Core.rectangle(mGray, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
        Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);

        eyearea =
            new Rect(
                r.x + r.width / 8,
                (int) (r.y + (r.height / 4.5)),
                r.width - 2 * r.width / 8,
                (int) (r.height / 3.0));
        Core.rectangle(mRgba, eyearea.tl(), eyearea.br(), new Scalar(255, 0, 0, 255), 2);
        Rect eyearea_right =
            new Rect(
                r.x + r.width / 16,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2,
                (int) (r.height / 3.0));
        Rect eyearea_left =
            new Rect(
                r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2,
                (int) (r.height / 3.0));
        Core.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(255, 0, 0, 255), 2);
        Core.rectangle(
            mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(255, 0, 0, 255), 2);

        if (learn_frames < 5) {
          teplateR = get_template(mCascadeER, eyearea_right, 24);
          teplateL = get_template(mCascadeEL, eyearea_left, 24);
          learn_frames++;
        } else {
          // match_eye draws the best-match rectangle for each eye; the second call
          // overwrites match_value, so only the left-eye score is retained.
          match_value = match_eye(eyearea_right, teplateR, FdActivity.method);
          match_value = match_eye(eyearea_left, teplateL, FdActivity.method);
        }
        Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
        Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
      }
    } else if (mDetectorType == NATIVE_DETECTOR) {
      if (mNativeDetector != null) mNativeDetector.detect(mGray, faces);
    } else {
      Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
      Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    try {
      Utils.matToBitmap(mRgba, bmp);
    } catch (Exception e) {
      Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
      bmp.recycle();
      bmp = null;
    }

    return bmp;
  }
Example #14
  private void match_eye(Rect area, Mat mTemplate, int type) {
    Point matchLoc;
    Mat mROI = mGray.submat(area);
    int result_cols = mROI.cols() - mTemplate.cols() + 1;
    int result_rows = mROI.rows() - mTemplate.rows() + 1;
    // Check for bad template size
    if (mTemplate.cols() == 0 || mTemplate.rows() == 0) {
      return;
    }
    Mat mResult = new Mat(result_rows, result_cols, CvType.CV_8U);
    // Note: matchTemplate only fills mResult further below, so this pixel count is taken
    // on the freshly allocated buffer.
    long nbPixels = (mResult.rows() * mResult.cols()) - getBlackPixels(mResult);
    if (Math.abs(nbPixels) < 2000) {
      final MediaPlayer mp = new MediaPlayer();
      try {
        mp.reset();
        AssetFileDescriptor afd;
        afd = getAssets().openFd("wakeUp.mp3");
        mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
        mp.prepare();
        mp.start();
        Thread.sleep(60000);
      } catch (IllegalStateException e) {
        e.printStackTrace();
      } catch (IOException e) {
        e.printStackTrace();
      } catch (InterruptedException e) {
        e.printStackTrace();
      }

      Log.i("You are sleeping", "YOU SLEPT");
    } else Log.i("M_match_eye: else ", "nbPixels = " + nbPixels);

    switch (type) {
      case TM_SQDIFF:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_SQDIFF);
        break;
      case TM_SQDIFF_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_SQDIFF_NORMED);
        break;
      case TM_CCOEFF:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCOEFF);
        break;
      case TM_CCOEFF_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCOEFF_NORMED);
        break;
      case TM_CCORR:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCORR);
        break;
      case TM_CCORR_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCORR_NORMED);
        break;
    }

    Core.MinMaxLocResult mmres = Core.minMaxLoc(mResult);
    // there is difference in matching methods - best match is max/min value
    if (type == TM_SQDIFF || type == TM_SQDIFF_NORMED) {
      matchLoc = mmres.minLoc;
    } else {
      matchLoc = mmres.maxLoc;
    }

    Point matchLoc_tx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
    Point matchLoc_ty =
        new Point(matchLoc.x + mTemplate.cols() + area.x, matchLoc.y + mTemplate.rows() + area.y);

    Imgproc.rectangle(mRgba, matchLoc_tx, matchLoc_ty, new Scalar(255, 255, 0, 255));
    Rect rec = new Rect(matchLoc_tx, matchLoc_ty);
  }
  public boolean onTouch(View v, MotionEvent event) {
    int cols = mRgba.cols();
    int rows = mRgba.rows();

    int xOffset = (mOpenCvCameraView.getWidth() - cols) / 2;
    int yOffset = (mOpenCvCameraView.getHeight() - rows) / 2;

    int x = (int) event.getX() - xOffset;
    int y = (int) event.getY() - yOffset;

    Log.i(TAG, "Touch image coordinates: (" + x + ", " + y + ")");

    if ((x < 0) || (y < 0) || (x > cols) || (y > rows)) return false;

    Rect touchedRect = new Rect();

    touchedRect.x = (x > 4) ? x - 4 : 0;
    touchedRect.y = (y > 4) ? y - 4 : 0;

    touchedRect.width = (x + 4 < cols) ? x + 4 - touchedRect.x : cols - touchedRect.x;
    touchedRect.height = (y + 4 < rows) ? y + 4 - touchedRect.y : rows - touchedRect.y;

    Mat touchedRegionRgba = mRgba.submat(touchedRect);

    Mat touchedRegionHsv = new Mat();
    Imgproc.cvtColor(touchedRegionRgba, touchedRegionHsv, Imgproc.COLOR_RGB2HSV_FULL);

    // Calculate average color of touched region
    mBlobColorHsv = Core.sumElems(touchedRegionHsv);
    int pointCount = touchedRect.width * touchedRect.height;
    for (int i = 0; i < mBlobColorHsv.val.length; i++) mBlobColorHsv.val[i] /= pointCount;

    mBlobColorRgba = converScalarHsv2Rgba(mBlobColorHsv);

    Log.i(
        TAG,
        "Touched rgba color: ("
            + mBlobColorRgba.val[0]
            + ", "
            + mBlobColorRgba.val[1]
            + ", "
            + mBlobColorRgba.val[2]
            + ", "
            + mBlobColorRgba.val[3]
            + ")");

    mDetector.setHsvColor(mBlobColorHsv);

    Imgproc.resize(mDetector.getSpectrum(), mSpectrum, SPECTRUM_SIZE);

    mIsColorSelected = true;
    final Handler handler = new Handler();
    Runnable runable =
        new Runnable() {

          @Override
          public void run() {
            try {
              // do your code here
              hehe.setText(abc.toString());
              if (abc > 16000 && abc < 25000) {
                imageView.setImageResource(R.drawable.green1);
              } else {
                imageView.setImageResource(R.drawable.red1);
              }
              // also call the same runnable
              handler.postDelayed(this, 1000);
            } catch (Exception e) {
              // TODO: handle exception
            } finally {
              // also call the same runnable
              // handler.postDelayed(this, 1000);
            }
          }
        };
    handler.postDelayed(runable, 1000);
    touchedRegionRgba.release();
    touchedRegionHsv.release();

    return false; // don't need subsequent touch events
  }
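converScalarHsv2Rgba is referenced above but not included in the snippet. A plausible implementation, following the stock OpenCV Android color-blob-detection sample, converts the averaged scalar through a 1x1 Mat:

  private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
    Mat pointMatRgba = new Mat();
    // Wrap the averaged HSV value in a 1x1 3-channel Mat so cvtColor can convert it.
    Mat pointMatHsv = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
    Imgproc.cvtColor(pointMatHsv, pointMatRgba, Imgproc.COLOR_HSV2RGB_FULL, 4);
    return new Scalar(pointMatRgba.get(0, 0));
  }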
  // public void hpFind(Mat mRgba, Mat mGray, HeadPose hp)
  public void hpFind(Mat mRgba, Mat mGray, WorkingHeadPose hp, Rect[] facesArray) {

    int i;
    Log.i("HeadPose", "hpFind: total faces found = " + facesArray.length);
    if (hp.hpstatus == HeadPoseStatus.NONE) {
      if (facesArray.length < 1) return;
      TempFace = facesArray.clone();
      hp.cornerCount = hp.maxCorners;
      Rect roi =
          new Rect(
              (int) facesArray[0].tl().x,
              (int) facesArray[0].tl().y,
              facesArray[0].width,
              facesArray[0].height);
      // Find corners on a cropped copy of the grayscale frame; mGray itself is untouched.
      Mat grayClone = mGray.clone();
      Mat cropped = grayClone.submat(roi);
      hpFindCorners(cropped, hp);

      // Map face points to model

      if (hp.cornerCount < 4) return;

      Vector<Point3> points = new Vector<Point3>();
      // Guard against an empty corner Mat (copying from it previously threw an exception).
      if (hp.corners.total() > 0) {
        Log.i("hpFind+", "hp.corners.total() = " + hp.corners.total());
        Log.i("hpFind+", "hp.cornerCount = " + hp.cornerCount);

        // Point3 temp1;//=new Point3();

        for (i = 0; i < hp.cornerCount; i++) {
          if (i == hp.corners.total()) break;
          // Log.i("hpFind+","Itertion"+i);
          points.add(
              new Point3(
                  (hp.corners.toList().get(i).x / facesArray[0].width) - 0.5,
                  -(hp.corners.toList().get(i).y / facesArray[0].height) + 0.5,
                  0.5 * Math.sin(PI * (hp.corners.toList().get(i).x / facesArray[0].width))));
          // modelPoints.toList().set(i,
          // hpmodel(hp.corners.toList().get(i).x/facesArray[0].width,hp.corners.toList().get(i).x/facesArray[0].height));
          // temp1=new Point3((hp.corners.toList().get(i).x/facesArray[0].width) -
          // 0.5,-(hp.corners.toList().get(i).y/facesArray[0].height) + 0.5, 0.5 * Math.sin(PI
          // *(hp.corners.toList().get(i).x/facesArray[0].width)));
          // modelPoints. .p  .toList().set .set(i,temp1);

        }
        modelPoints.fromList(points);
      }

      // imran example from marker.java , search for Point3f

      // Translate corners from face coordinates to image coordinates
      for (i = 0; i < hp.cornerCount; i++) {
        if (i == hp.corners.total()) break;
        hp.corners.toList().get(i).x += facesArray[0].tl().x;
        hp.corners.toList().get(i).y += facesArray[0].br().y;
        // hp.corners.toList().set(i, hp.corners.toList().get(i)+facesArray[0].tl().x);

      }
      hp.corners.copyTo(hp.tempcorners); // .clone();
      // Change status
      hp.hpstatus = HeadPoseStatus.KEYFRAME;
    } else {
      if (facesArray.length > 1)
        TempFace = facesArray.clone(); // imran assigning here also,to better measure

      MatOfPoint2f corners2f = new MatOfPoint2f();
      hp.corners.convertTo(corners2f, CvType.CV_32FC2);
      hp.previousCorners = corners2f;
      corners2f.convertTo(hp.corners, CvType.CV_32S);

      hpTrack(mRgba, hp, facesArray);
      Point center = new Point();

      if (hp.cornerCount < 4) {
        hp.hpstatus = HeadPoseStatus.NONE;
        return;
      }

      hp.hpstatus = HeadPoseStatus.TRACKING;
    }
    if (hp.previousFrame == null) // imran
    {
      // hp.previousFrame =new Mat(mRgba.width(),mRgba.height(),CvType);
      hp.previousFrame = new Mat(mRgba.size(), CvType.CV_8UC4);
    }
    mRgba.copyTo(hp.previousFrame);
    // cvCopy(frame, headPose->previousFrame, NULL);

  }
 private Mat cloneMarkerImageSegment(Mat imgMat) {
   Rect rect = calculateImageSegmentArea(imgMat);
   Mat calculatedImg = imgMat.submat(rect.y, rect.y + rect.height, rect.x, rect.x + rect.width);
   return calculatedImg.clone();
 }
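calculateImageSegmentArea, used by several of these marker-handling snippets, is not shown anywhere in this listing. A hypothetical stand-in that returns a centred square region (the real method may size the segment differently):

  // Illustrative only: a centred square covering half of the frame's shorter side.
  private Rect calculateImageSegmentArea(Mat img) {
    int side = Math.min(img.rows(), img.cols()) / 2;
    int x = (img.cols() - side) / 2;
    int y = (img.rows() - side) / 2;
    return new Rect(x, y, side, side);
  }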
Example #18
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
    }

    if (mZoomWindow == null || mZoomWindow2 == null) CreateAuxiliaryMats();

    MatOfRect faces = new MatOfRect();

    if (mJavaDetector != null)
      mJavaDetector.detectMultiScale(
          mGray,
          faces,
          1.1,
          2,
          2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
          new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
          new Size());

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
      Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
      xCenter = facesArray[i].x + facesArray[i].width / 2;
      yCenter = facesArray[i].y + facesArray[i].height / 2;
      Point center = new Point(xCenter, yCenter);

      Imgproc.circle(mRgba, center, 10, new Scalar(255, 0, 0, 255), 3);

      Imgproc.putText(
          mRgba,
          "[" + center.x + "," + center.y + "]",
          new Point(center.x + 20, center.y + 20),
          Core.FONT_HERSHEY_SIMPLEX,
          0.7,
          new Scalar(255, 255, 255, 255));

      Rect r = facesArray[i];
      // compute the eye area
      Rect eyearea =
          new Rect(
              r.x + r.width / 8,
              (int) (r.y + (r.height / 4.5)),
              r.width - 2 * r.width / 8,
              (int) (r.height / 3.0));
      // split it
      Rect eyearea_right =
          new Rect(
              r.x + r.width / 16,
              (int) (r.y + (r.height / 4.5)),
              (r.width - 2 * r.width / 16) / 2,
              (int) (r.height / 3.0));
      Rect eyearea_left =
          new Rect(
              r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
              (int) (r.y + (r.height / 4.5)),
              (r.width - 2 * r.width / 16) / 2,
              (int) (r.height / 3.0));
      // draw the split eye areas on the RGBA preview
      Imgproc.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(255, 0, 0, 255), 2);
      Imgproc.rectangle(
          mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(255, 0, 0, 255), 2);

      if (learn_frames < 5) {
        teplateR = get_template(mJavaDetectorEye, eyearea_right, 24);
        teplateL = get_template(mJavaDetectorEye, eyearea_left, 24);
        learn_frames++;
      } else {
        // Learning finished, use the new templates for template
        // matching
        match_eye(eyearea_right, teplateR, method);
        match_eye(eyearea_left, teplateL, method);
      }

      // cut eye areas and put them to zoom windows
      Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
      Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
    }

    return mRgba;
  }