Example 1
  private Mat get_template(CascadeClassifier clasificator, Rect area, int size) {
    Mat template = new Mat();
    Mat mROI = mGray.submat(area);
    MatOfRect eyes = new MatOfRect();
    Point iris = new Point();
    Rect eye_template = new Rect();
    clasificator.detectMultiScale(
        mROI,
        eyes,
        1.15,
        2,
        Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
        new Size(30, 30),
        new Size());

    Rect[] eyesArray = eyes.toArray();
    // only the first (biggest) detected eye is used; the method returns on the first hit
    for (int i = 0; i < eyesArray.length; i++) {
      Rect e = eyesArray[i];
      e.x = area.x + e.x;
      e.y = area.y + e.y;
      Rect eye_only_rectangle =
          new Rect(
              (int) e.tl().x,
              (int) (e.tl().y + e.height * 0.4),
              (int) e.width,
              (int) (e.height * 0.6));
      mROI = mGray.submat(eye_only_rectangle);
      Mat vyrez = mRgba.submat(eye_only_rectangle);

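      // the darkest pixel in the lower part of the detected eye approximates the pupil centre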
      Core.MinMaxLocResult mmG = Core.minMaxLoc(mROI);

      Imgproc.circle(vyrez, mmG.minLoc, 2, new Scalar(255, 255, 255, 255), 2);
      iris.x = mmG.minLoc.x + eye_only_rectangle.x;
      iris.y = mmG.minLoc.y + eye_only_rectangle.y;
      eye_template = new Rect((int) iris.x - size / 2, (int) iris.y - size / 2, size, size);
      Imgproc.rectangle(mRgba, eye_template.tl(), eye_template.br(), new Scalar(255, 0, 0, 255), 2);
      template = (mGray.submat(eye_template)).clone();
      return template;
    }
    return template;
  }
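The method above also relies on class members that are not part of the snippet: mGray and mRgba hold the current camera frame, and the CascadeClassifier is the eye cascade. A minimal sketch of the assumed declarations (the field names match the code in these examples; the comments and types are assumptions, and the usual org.opencv imports are taken as given):

  private Mat mGray;                          // current camera frame, grayscale
  private Mat mRgba;                          // current camera frame, RGBA, used for drawing
  private CascadeClassifier mJavaDetectorEye; // eye cascade passed in as "clasificator"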
Example 2
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    // convert to grayscale (the camera preview frame is RGBA)
    Imgproc.cvtColor(mRgba, img_gray, Imgproc.COLOR_RGBA2GRAY);
    // binarise; with THRESH_OTSU the threshold is computed automatically and the 100 is ignored
    Imgproc.threshold(img_gray, erd, 100, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
    // remove pixel noise by eroding with a 9x9 rectangular structuring element
    Mat erodeKernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(9, 9));
    Imgproc.erode(erd, tgt, erodeKernel);
    // dilate to enlarge the remaining blobs back to roughly their original size
    Mat dilateKernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(9, 9));
    Imgproc.dilate(tgt, erd, dilateKernel);
    // keep only a 200-pixel-high horizontal band around the vertical centre of the frame
    Size s = erd.size();
    int W = (int) s.width;
    int H = (int) s.height;
    Rect r = new Rect(0, H / 2 - 100, W, 200);
    Mat mask = new Mat(s, CvType.CV_8UC1, new Scalar(0, 0, 0));
    Imgproc.rectangle(mask, r.tl(), r.br(), new Scalar(255, 255, 255), -1);
    erd.copyTo(window, mask);

    // find the contours
    Imgproc.findContours(
        window, contours, dest, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);
    // find largest contour
    int maxContour = -1;
    double area = 0;
    for (int i = 0; i < contours.size(); i++) {
      double contArea = Imgproc.contourArea(contours.get(i));
      if (contArea > area) {
        area = contArea;
        maxContour = i;
      }
    }
    // form bounding rectangle for largest contour
    Rect rect = null;
    if (maxContour > -1) rect = Imgproc.boundingRect(contours.get(maxContour));
    // calibration phase: wait until the detected object is roughly centred in the frame
    if (!train) {
      if (rect != null) {
        Imgproc.rectangle(mRgba, rect.tl(), rect.br(), new Scalar(255, 255, 255), 5);
        if ((rect.x + rect.width / 2) > W / 2 - 20 && (rect.x + rect.width / 2) < W / 2 + 20) {
          runOnUiThread(
              new Runnable() {
                @Override
                public void run() {
                  Toast.makeText(getApplicationContext(), "OK", Toast.LENGTH_SHORT).show();
                }
              });
          train = true;
        }
      }
      if (contours != null) contours.clear();
      return mRgba;
    }
    if (train) {
      if (rect != null) {
        Imgproc.rectangle(mRgba, rect.tl(), rect.br(), new Scalar(255, 255, 255), 5);
        // direction of movement: compare the object centre with the frame centre
        int thr = 100;
        int objCenter = rect.x + rect.width / 2;
        if (objCenter < (W / 2 - thr)) {
          // object is left of centre
          uHandler.obtainMessage(MainActivity.LEFT).sendToTarget();
        } else if (objCenter > (W / 2 + thr)) {
          // object is right of centre
          uHandler.obtainMessage(MainActivity.RIGHT).sendToTarget();
        } else {
          // object is roughly centred: keep moving forward
          uHandler.obtainMessage(MainActivity.FORWARD).sendToTarget();
        }
      } else {
        // stop moving
        uHandler.obtainMessage(MainActivity.STOP).sendToTarget();
      }
    }
    if (contours != null) contours.clear();
    return mRgba;
  }
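This callback writes into several preallocated members (img_gray, erd, tgt, window, dest, contours, train, uHandler) that the snippet does not declare. A minimal sketch of how they could be set up, assuming the usual CameraBridgeViewBase lifecycle and the standard org.opencv, java.util and android.os imports; the field names match the code above, while the types and allocation strategy are assumptions:

  private Mat mRgba, img_gray, erd, tgt, window, dest;
  private List<MatOfPoint> contours = new ArrayList<>();
  private volatile boolean train = false;
  private Handler uHandler; // delivers LEFT/RIGHT/FORWARD/STOP messages to MainActivity

  public void onCameraViewStarted(int width, int height) {
    // working buffers, allocated once per preview session
    img_gray = new Mat(height, width, CvType.CV_8UC1);
    erd = new Mat(height, width, CvType.CV_8UC1);
    tgt = new Mat(height, width, CvType.CV_8UC1);
    window = new Mat(height, width, CvType.CV_8UC1);
    dest = new Mat(); // contour hierarchy filled by findContours
  }

  public void onCameraViewStopped() {
    img_gray.release();
    erd.release();
    tgt.release();
    window.release();
    dest.release();
  }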
Example 3
  private void match_eye(Rect area, Mat mTemplate, int type) {
    Point matchLoc;
    Mat mROI = mGray.submat(area);
    int result_cols = mROI.cols() - mTemplate.cols() + 1;
    int result_rows = mROI.rows() - mTemplate.rows() + 1;
    // Check for bad template size
    if (mTemplate.cols() == 0 || mTemplate.rows() == 0) {
      return;
    }
    // matchTemplate produces a CV_32FC1 result of size result_cols x result_rows
    Mat mResult = new Mat(result_rows, result_cols, CvType.CV_32FC1);
    // count the non-black pixels in the eye region; a low count is treated as a closed eye
    long nbPixels = ((long) mROI.rows() * mROI.cols()) - getBlackPixels(mROI);
    if (nbPixels < 2000) {
      final MediaPlayer mp = new MediaPlayer();
      try {
        mp.reset();
        AssetFileDescriptor afd;
        afd = getAssets().openFd("wakeUp.mp3");
        mp.setDataSource(afd.getFileDescriptor(), afd.getStartOffset(), afd.getLength());
        mp.prepare();
        mp.start();
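        // note: sleeping here blocks the camera frame thread while the alarm plays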
        Thread.sleep(60000);
      } catch (IllegalStateException e) {
        e.printStackTrace();
      } catch (IOException e) {
        e.printStackTrace();
      } catch (InterruptedException e) {
        e.printStackTrace();
      }

      Log.i("You are sleeping", "YOU SLEPT");
    } else Log.i("M_match_eye: else ", "nbPixels = " + nbPixels);

    switch (type) {
      case TM_SQDIFF:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_SQDIFF);
        break;
      case TM_SQDIFF_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_SQDIFF_NORMED);
        break;
      case TM_CCOEFF:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCOEFF);
        break;
      case TM_CCOEFF_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCOEFF_NORMED);
        break;
      case TM_CCORR:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCORR);
        break;
      case TM_CCORR_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCORR_NORMED);
        break;
    }

    Core.MinMaxLocResult mmres = Core.minMaxLoc(mResult);
    // there is difference in matching methods - best match is max/min value
    if (type == TM_SQDIFF || type == TM_SQDIFF_NORMED) {
      matchLoc = mmres.minLoc;
    } else {
      matchLoc = mmres.maxLoc;
    }

    Point matchLoc_tx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
    Point matchLoc_ty =
        new Point(matchLoc.x + mTemplate.cols() + area.x, matchLoc.y + mTemplate.rows() + area.y);

    Imgproc.rectangle(mRgba, matchLoc_tx, matchLoc_ty, new Scalar(255, 255, 0, 255));
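    // bounding box of the best match (not used further in this example)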
    Rect rec = new Rect(matchLoc_tx, matchLoc_ty);
  }
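getBlackPixels() is called above but not shown. A minimal sketch, assuming it simply counts the zero-valued pixels of a single-channel Mat (the name and call site come from the code above; the body is an assumption):

  // hypothetical helper: number of pixels whose value is exactly zero
  // (Core.countNonZero expects a single-channel Mat)
  private long getBlackPixels(Mat m) {
    return (long) m.rows() * m.cols() - Core.countNonZero(m);
  }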
Example 4
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
    }

    if (mZoomWindow == null || mZoomWindow2 == null) CreateAuxiliaryMats();

    MatOfRect faces = new MatOfRect();

    if (mJavaDetector != null)
      mJavaDetector.detectMultiScale(
          mGray,
          faces,
          1.1,
          2,
          Objdetect.CASCADE_SCALE_IMAGE,
          new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
          new Size());

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
      Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
      xCenter = facesArray[i].x + facesArray[i].width / 2;
      yCenter = facesArray[i].y + facesArray[i].height / 2;
      Point center = new Point(xCenter, yCenter);

      Imgproc.circle(mRgba, center, 10, new Scalar(255, 0, 0, 255), 3);

      Imgproc.putText(
          mRgba,
          "[" + center.x + "," + center.y + "]",
          new Point(center.x + 20, center.y + 20),
          Core.FONT_HERSHEY_SIMPLEX,
          0.7,
          new Scalar(255, 255, 255, 255));

      Rect r = facesArray[i];
      // compute the eye area
      Rect eyearea =
          new Rect(
              r.x + r.width / 8,
              (int) (r.y + (r.height / 4.5)),
              r.width - 2 * r.width / 8,
              (int) (r.height / 3.0));
      // split it
      Rect eyearea_right =
          new Rect(
              r.x + r.width / 16,
              (int) (r.y + (r.height / 4.5)),
              (r.width - 2 * r.width / 16) / 2,
              (int) (r.height / 3.0));
      Rect eyearea_left =
          new Rect(
              r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
              (int) (r.y + (r.height / 4.5)),
              (r.width - 2 * r.width / 16) / 2,
              (int) (r.height / 3.0));
      // draw the eye areas on the RGBA preview (switch mRgba to mGray below to draw on
      // the working grayscale mat instead)
      Imgproc.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(255, 0, 0, 255), 2);
      Imgproc.rectangle(
          mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(255, 0, 0, 255), 2);

      if (learn_frames < 5) {
        teplateR = get_template(mJavaDetectorEye, eyearea_right, 24);
        teplateL = get_template(mJavaDetectorEye, eyearea_left, 24);
        learn_frames++;
      } else {
        // Learning finished, use the new templates for template
        // matching
        match_eye(eyearea_right, teplateR, method);
        match_eye(eyearea_left, teplateL, method);
      }

      // cut eye areas and put them to zoom windows
      Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
      Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
    }

    return mRgba;
  }
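CreateAuxiliaryMats() and the two zoom windows (mZoomWindow, mZoomWindow2) are used above but not defined in this example. A minimal sketch, assuming the zoom windows are fixed submats of the RGBA preview that act as on-screen magnifiers for the eye crops; the method and field names come from the code above, while the positions and sizes are assumptions:

  private void CreateAuxiliaryMats() {
    if (mGray.empty()) return;
    int rows = mGray.rows();
    int cols = mGray.cols();
    // two regions on the right-hand side of the preview: bottom-right and top-right
    mZoomWindow = mRgba.submat(rows / 2 + rows / 10, rows, cols / 2 + cols / 10, cols);
    mZoomWindow2 = mRgba.submat(0, rows / 2 - rows / 10, cols / 2 + cols / 10, cols);
  }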