public static void main(String[] args) {

    // Input image (bundled on the classpath) and the output file name.
    String inputImagePath =
        identificate.class.getClassLoader().getResource("hf.jpg").getPath().substring(1);
    String outputImageFile = "identificate.png";

    // NOTE(review): substring(1) strips the leading '/' that getPath() yields
    // on Windows ("/C:/..."); this breaks Unix absolute paths — confirm target OS.
    String xmlPath =
        identificate
            .class
            .getClassLoader()
            .getResource("cascade_storage.xml")
            .getPath()
            .substring(1);

    // The OpenCV native library must be loaded before any OpenCV call.
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    CascadeClassifier faceDetector = new CascadeClassifier(xmlPath);
    // Fail fast instead of silently detecting nothing when the cascade
    // file could not be parsed (CascadeClassifier's ctor does not throw).
    if (faceDetector.empty()) {
      System.err.println("Failed to load cascade classifier: " + xmlPath);
      return;
    }

    Mat image = Highgui.imread(inputImagePath);
    // imread returns an empty Mat (rather than throwing) on failure.
    if (image.empty()) {
      System.err.println("Failed to read input image: " + inputImagePath);
      return;
    }

    // Detect faces; MatOfRect is OpenCV's container for a list of Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Draw a red (BGR 0,0,255) bounding box around each detected face.
    for (Rect rect : faceDetections.toArray()) {
      Core.rectangle(
          image,
          new Point(rect.x, rect.y),
          new Point(rect.x + rect.width, rect.y + rect.height),
          new Scalar(0, 0, 255));
    }

    // Write the annotated image to disk.
    Highgui.imwrite(outputImageFile, image);

    System.out.print("\nOK!");
  }
  @Override
  public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    // BUG FIX: the destination is named grayScaleImage, but the original code
    // converted RGBA -> RGB (still 3 channels). Convert to single-channel
    // grayscale as the name (and the detector) intends.
    Imgproc.cvtColor(mRgba, grayScaleImage, Imgproc.COLOR_RGBA2GRAY);

    MatOfRect faces = new MatOfRect();

    // Detect faces; the classifier may still be null while loading asynchronously.
    if (cascadeClassifier != null) {
      cascadeClassifier.detectMultiScale(
          grayScaleImage,
          faces,
          1.1, // scale factor between image-pyramid levels
          2, // minNeighbors: overlapping detections required to keep a candidate
          2, // flags (legacy parameter, ignored by newer cascades)
          new Size(absoluteFaceSize, absoluteFaceSize), // minimum face size
          new Size()); // empty Size = no maximum
    }

    // Outline every detected face in green on the color frame.
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
      Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), new Scalar(0, 255, 0, 255), 3);

    // Record whether at least one face was present in this frame.
    facesInASecond.add(facesArray.length > 0);

    return mRgba;
  }
  // OpenCV code
  private void modifyImage(String fileName) {
    // Load the frontal-face Haar cascade and read the target image.
    CascadeClassifier faceDetector = new CascadeClassifier("haarcascade_frontalface_alt.xml");
    Mat image = Highgui.imread(fileName);

    // Run detection; MatOfRect collects one Rect per detected face.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Blur each detected face region in place (submat shares pixels
    // with the parent image, so blurring it edits the original).
    Rect[] faces = faceDetections.toArray();
    for (int i = 0; i < faces.length; i++) {
      Mat region = image.submat(faces[i]);
      Imgproc.blur(region, region, new Size(30, 30));
    }

    // Persist the edited copy under a prefixed name.
    Highgui.imwrite("edited_" + fileName, image);
  }
Beispiel #4
0
  /** Called when the activity is first created. */
  @Override
  public void onCreate(Bundle savedInstanceState) {
    Log.i(TAG, "called onCreate");
    super.onCreate(savedInstanceState);
    getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);

    setContentView(R.layout.face_detect_surface_view);

    try {
      // Copy the cascade file from raw resources into app-private storage so
      // the native OpenCV loader can open it by absolute path.
      File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
      mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");

      // try-with-resources guarantees both streams are closed even when an
      // IOException interrupts the copy (the original leaked them on failure).
      try (InputStream is = getResources().openRawResource(R.raw.lbpcascade_frontalface);
          FileOutputStream os = new FileOutputStream(mCascadeFile)) {
        byte[] buffer = new byte[4096];
        int bytesRead;
        while ((bytesRead = is.read(buffer)) != -1) {
          os.write(buffer, 0, bytesRead);
        }
      }

      mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
      if (mJavaDetector.empty()) {
        Log.e(TAG, "Failed to load cascade classifier");
        mJavaDetector = null;
      } else {
        Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());
      }

      mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0); // hujiawei

      // The on-disk copy is only needed while the classifiers load.
      cascadeDir.delete();

    } catch (IOException e) {
      e.printStackTrace();
      Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
    }

    mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.fd_activity_surface_view);
    mOpenCvCameraView.enableView();
    mOpenCvCameraView.setCvCameraViewListener(this);
  }
Beispiel #5
0
  /**
   * Extracts a small square template image centered on the darkest point
   * (assumed to be the pupil) of the first eye detected inside {@code area}.
   *
   * @param clasificator eye cascade used for detection
   * @param area face sub-region, in mGray coordinates, to search within
   * @param size side length in pixels of the returned square template
   * @return a clone of the template region from mGray, or an empty Mat when
   *     no eye was detected
   */
  private Mat get_template(CascadeClassifier clasificator, Rect area, int size) {
    Mat template = new Mat();
    Mat mROI = mGray.submat(area);
    MatOfRect eyes = new MatOfRect();
    Point iris = new Point();
    Rect eye_template = new Rect();
    // Detect eyes inside the face region; keep only the biggest candidate.
    clasificator.detectMultiScale(
        mROI,
        eyes,
        1.15,
        2,
        Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
        new Size(30, 30),
        new Size());

    Rect[] eyesArray = eyes.toArray();
    // NOTE: the index is never incremented — the unconditional `return` at the
    // bottom of the body means only the first detected eye is ever processed.
    for (int i = 0; i < eyesArray.length; ) {
      Rect e = eyesArray[i];
      // Translate the eye rect from ROI-local to full-image coordinates.
      e.x = area.x + e.x;
      e.y = area.y + e.y;
      // Keep only the lower 60% of the eye box (drops the eyebrow region).
      Rect eye_only_rectangle =
          new Rect(
              (int) e.tl().x,
              (int) (e.tl().y + e.height * 0.4),
              (int) e.width,
              (int) (e.height * 0.6));
      mROI = mGray.submat(eye_only_rectangle);
      Mat vyrez = mRgba.submat(eye_only_rectangle);

      // The darkest pixel in the eye region approximates the pupil center.
      Core.MinMaxLocResult mmG = Core.minMaxLoc(mROI);

      // Mark the pupil on the color preview.
      Imgproc.circle(vyrez, mmG.minLoc, 2, new Scalar(255, 255, 255, 255), 2);
      iris.x = mmG.minLoc.x + eye_only_rectangle.x;
      iris.y = mmG.minLoc.y + eye_only_rectangle.y;
      // NOTE(review): eye_template can extend past the image borders near the
      // frame edge, making submat() throw — confirm callers guard against this.
      eye_template = new Rect((int) iris.x - size / 2, (int) iris.y - size / 2, size, size);
      Imgproc.rectangle(mRgba, eye_template.tl(), eye_template.br(), new Scalar(255, 0, 0, 255), 2);
      template = (mGray.submat(eye_template)).clone();
      return template;
    }
    return template;
  }
Beispiel #6
0
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    // Derive the minimum face size from the frame height on first use.
    if (mAbsoluteFaceSize == 0) {
      int frameHeight = mGray.rows();
      if (Math.round(frameHeight * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(frameHeight * mRelativeFaceSize);
      }
      mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    // Dispatch to whichever detector backend is currently selected.
    if (mDetectorType == JAVA_DETECTOR) {
      if (mJavaDetector != null) {
        mJavaDetector.detectMultiScale(
            mGray,
            faces,
            1.1,
            2,
            2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
            new Size());
      }
    } else if (mDetectorType == NATIVE_DETECTOR) {
      if (mNativeDetector != null) {
        mNativeDetector.detect(mGray, faces);
      }
    } else {
      Log.e(TAG, "Detection method is not selected!");
    }

    // Outline every detection on the color frame.
    for (Rect face : faces.toArray()) {
      Core.rectangle(mRgba, face.tl(), face.br(), FACE_RECT_COLOR, 3);
    }

    return mRgba;
  }
Beispiel #7
0
  public FdView(Context context) {
    super(context);

    try {
      InputStream is = context.getResources().openRawResource(R.raw.lbpcascade_frontalface);
      File cascadeDir = context.getDir("cascade", Context.MODE_PRIVATE);
      mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
      FileOutputStream os = new FileOutputStream(mCascadeFile);

      byte[] buffer = new byte[4096];
      int bytesRead;
      while ((bytesRead = is.read(buffer)) != -1) {
        os.write(buffer, 0, bytesRead);
      }
      is.close();
      os.close();

      // --------------------------------- load left eye classificator
      // -----------------------------------
      InputStream iser = context.getResources().openRawResource(R.raw.haarcascade_lefteye_2splits);
      File cascadeDirER = context.getDir("cascadeER", Context.MODE_PRIVATE);
      File cascadeFileER = new File(cascadeDirER, "haarcascade_eye_right.xml");
      FileOutputStream oser = new FileOutputStream(cascadeFileER);

      byte[] bufferER = new byte[4096];
      int bytesReadER;
      while ((bytesReadER = iser.read(bufferER)) != -1) {
        oser.write(bufferER, 0, bytesReadER);
      }
      iser.close();
      oser.close();
      // ----------------------------------------------------------------------------------------------------

      // --------------------------------- load right eye classificator
      // ------------------------------------
      InputStream isel = context.getResources().openRawResource(R.raw.haarcascade_lefteye_2splits);
      File cascadeDirEL = context.getDir("cascadeEL", Context.MODE_PRIVATE);
      File cascadeFileEL = new File(cascadeDirEL, "haarcascade_eye_left.xml");
      FileOutputStream osel = new FileOutputStream(cascadeFileEL);

      byte[] bufferEL = new byte[4096];
      int bytesReadEL;
      while ((bytesReadEL = isel.read(bufferEL)) != -1) {
        osel.write(bufferEL, 0, bytesReadEL);
      }
      isel.close();
      osel.close();

      // ------------------------------------------------------------------------------------------------------

      mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
      mCascadeER = new CascadeClassifier(cascadeFileER.getAbsolutePath());
      mCascadeEL = new CascadeClassifier(cascadeFileER.getAbsolutePath());
      if (mJavaDetector.empty() || mCascadeER.empty() || mCascadeEL.empty()) {
        Log.e(TAG, "Failed to load cascade classifier");
        mJavaDetector = null;
        mCascadeER = null;
        mCascadeEL = null;
      } else Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());

      mNativeDetector = new DetectionBasedTracker(mCascadeFile.getAbsolutePath(), 0);

      cascadeDir.delete();
      cascadeFileER.delete();
      cascadeDirER.delete();
      cascadeFileEL.delete();
      cascadeDirEL.delete();

    } catch (IOException e) {
      e.printStackTrace();
      Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
    }
  }
Beispiel #8
0
  @Override
  protected Bitmap processFrame(VideoCapture capture) {
    // Grab the current camera frame in both color and grayscale.
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    // Lazily derive the minimum face size from the frame height.
    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      // BUG FIX: the original had a stray ';' after this condition, which
      // turned the 'if' into a no-op and made the assignment unconditional.
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
      mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
      if (mJavaDetector != null)
        mJavaDetector.detectMultiScale(
            mGray,
            faces,
            1.1, // pyramid scale factor
            2, // minNeighbors
            2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
            new Size());

      if (mZoomCorner == null || mZoomWindow == null) CreateAuxiliaryMats();

      Rect[] facesArray = faces.toArray();

      for (int i = 0; i < facesArray.length; i++) {
        Rect r = facesArray[i];
        // Outline the face on both the grayscale and color frames.
        Core.rectangle(mGray, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
        Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);

        // Heuristic eye band: a horizontal strip across the upper-middle of
        // the face box.
        eyearea =
            new Rect(
                r.x + r.width / 8,
                (int) (r.y + (r.height / 4.5)),
                r.width - 2 * r.width / 8,
                (int) (r.height / 3.0));
        Core.rectangle(mRgba, eyearea.tl(), eyearea.br(), new Scalar(255, 0, 0, 255), 2);
        // Split the band into right and left halves.
        Rect eyearea_right =
            new Rect(
                r.x + r.width / 16,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2,
                (int) (r.height / 3.0));
        Rect eyearea_left =
            new Rect(
                r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2,
                (int) (r.height / 3.0));
        Core.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(255, 0, 0, 255), 2);
        Core.rectangle(
            mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(255, 0, 0, 255), 2);

        // Learn eye templates for the first 5 frames, then track by matching.
        if (learn_frames < 5) {
          teplateR = get_template(mCascadeER, eyearea_right, 24);
          teplateL = get_template(mCascadeEL, eyearea_left, 24);
          learn_frames++;
        } else {
          match_value = match_eye(eyearea_right, teplateR, FdActivity.method);
          match_value = match_eye(eyearea_left, teplateL, FdActivity.method);
        }
        // Show zoomed eye crops in the preview windows.
        Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
        Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
      }
    } else if (mDetectorType == NATIVE_DETECTOR) {
      if (mNativeDetector != null) mNativeDetector.detect(mGray, faces);
    } else {
      Log.e(TAG, "Detection method is not selected!");
    }

    // Re-outline all faces with the standard color on the color frame.
    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
      Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    try {
      Utils.matToBitmap(mRgba, bmp);
    } catch (Exception e) {
      Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
      bmp.recycle();
      bmp = null;
    }

    return bmp;
  }
Beispiel #9
0
  public void run() {
    System.out.println("\nRunning DetectFaceDemo");

    // Load the eye cascade from the resources directory. substring(1) strips
    // the leading '/' from getResource().getPath() (Windows-style paths).
    // String facefilterpath =
    // getClass().getResource("../resources/haarcascade_mcs_eyepair_big.xml").getPath();
    String facefilterpath = getClass().getResource("../resources/haarcascade_eye.xml").getPath();
    facefilterpath = facefilterpath.substring(1);
    CascadeClassifier faceDetector = new CascadeClassifier(facefilterpath);
    String pngpath = getClass().getResource("../resources/brown_eyes.jpg").getPath();
    pngpath = pngpath.substring(1);
    Mat image = Highgui.imread(pngpath);

    // Detect eyes in the image; MatOfRect is a container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Convert to grayscale and blur before Hough circle detection. The named
    // constant replaces the original magic number 6.
    Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2GRAY);
    Imgproc.GaussianBlur(image, image, new Size(7, 7), 4, 4);

    MatOfPoint3f circles = new MatOfPoint3f();
    MatOfPoint3f circles2 = new MatOfPoint3f();

    // Small circles (radius 10-50).
    Imgproc.HoughCircles(
        image, circles, Imgproc.CV_HOUGH_GRADIENT, 5, image.rows() / 5, 100, 100, 10, 50);
    // Large circles (radius 50-400).
    Imgproc.HoughCircles(
        image, circles2, Imgproc.CV_HOUGH_GRADIENT, 5, image.rows() / 5, 100, 100, 50, 400);

    // Back to 3 channels so the colored annotations below are visible
    // (replaces the original magic number 8).
    Imgproc.cvtColor(image, image, Imgproc.COLOR_GRAY2BGR);

    // BUG FIX: the original printed the MatOfRect object itself; report the
    // actual number of detections instead.
    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));

    System.out.println(String.format("Detected %s circles", circles.total()));

    // Mark each detected circle: green center dot, red outline.
    for (Point3 circle : circles.toArray()) {
      Point center = new Point(circle.x, circle.y);
      int radius = (int) Math.round(circle.z);
      Core.circle(image, center, 3, new Scalar(0, 255, 0), -1, 8, 0);
      Core.circle(image, center, radius, new Scalar(0, 0, 255), 3, 8, 0);
    }
    for (Point3 circle : circles2.toArray()) {
      Point center = new Point(circle.x, circle.y);
      int radius = (int) Math.round(circle.z);
      Core.circle(image, center, 3, new Scalar(0, 255, 0), -1, 8, 0);
      Core.circle(image, center, radius, new Scalar(0, 0, 255), 3, 8, 0);
    }

    // Save the visualized detection.
    String filename = "faceDetection.png";
    System.out.println(String.format("Writing %s", filename));
    Highgui.imwrite(filename, image);
  }
Beispiel #10
0
        @Override
        public void onManagerConnected(int status) {
          switch (status) {
            case LoaderCallbackInterface.SUCCESS:
              {
                Log.i(TAG, "OpenCV loaded successfully");

                try {
                  // Copy the face cascade from raw resources to private
                  // storage so OpenCV can load it by absolute path.
                  File cascadeDir = getDir("cascade", Context.MODE_PRIVATE);
                  mCascadeFile = new File(cascadeDir, "lbpcascade_frontalface.xml");
                  // try-with-resources closes both streams even when the copy
                  // fails part-way (the original leaked them on exceptions).
                  try (InputStream is =
                          getResources().openRawResource(R.raw.lbpcascade_frontalface);
                      FileOutputStream os = new FileOutputStream(mCascadeFile)) {
                    byte[] buffer = new byte[4096];
                    int bytesRead;
                    while ((bytesRead = is.read(buffer)) != -1) {
                      os.write(buffer, 0, bytesRead);
                    }
                  }

                  // Eye cascade. NOTE(review): the resource is the left-eye
                  // cascade although the file is named "..._right" — confirm.
                  File cascadeDirER = getDir("cascadeER", Context.MODE_PRIVATE);
                  File cascadeFileER = new File(cascadeDirER, "haarcascade_eye_right.xml");
                  try (InputStream iser =
                          getResources().openRawResource(R.raw.haarcascade_lefteye_2splits);
                      FileOutputStream oser = new FileOutputStream(cascadeFileER)) {
                    byte[] bufferER = new byte[4096];
                    int bytesReadER;
                    while ((bytesReadER = iser.read(bufferER)) != -1) {
                      oser.write(bufferER, 0, bytesReadER);
                    }
                  }

                  mJavaDetector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
                  if (mJavaDetector.empty()) {
                    Log.e(TAG, "Failed to load cascade classifier");
                    mJavaDetector = null;
                  } else
                    Log.i(TAG, "Loaded cascade classifier from " + mCascadeFile.getAbsolutePath());

                  mJavaDetectorEye = new CascadeClassifier(cascadeFileER.getAbsolutePath());
                  if (mJavaDetectorEye.empty()) {
                    Log.e(TAG, "Failed to load cascade classifier");
                    mJavaDetectorEye = null;
                  } else
                    // BUG FIX: the original logged mCascadeFile's path here,
                    // not the eye cascade that was actually loaded.
                    Log.i(TAG, "Loaded cascade classifier from " + cascadeFileER.getAbsolutePath());

                  cascadeDir.delete();

                } catch (IOException e) {
                  e.printStackTrace();
                  Log.e(TAG, "Failed to load cascade. Exception thrown: " + e);
                }
                mOpenCvCameraView.setCameraIndex(1);
                mOpenCvCameraView.enableFpsMeter();
                mOpenCvCameraView.enableView();
              }
              break;
            default:
              {
                super.onManagerConnected(status);
              }
              break;
          }
        }
Beispiel #11
0
  /**
   * Per-frame callback: detects faces, annotates each with its center and eye
   * regions, and learns/matches pupil templates for eye tracking.
   *
   * @param inputFrame camera frame providing both RGBA and grayscale views
   * @return the annotated RGBA frame to display
   */
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    // Lazily derive the minimum face size from the frame height on first use.
    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
    }

    if (mZoomWindow == null || mZoomWindow2 == null) CreateAuxiliaryMats();

    MatOfRect faces = new MatOfRect();

    // Detect faces on the grayscale frame (detector may still be loading).
    if (mJavaDetector != null)
      mJavaDetector.detectMultiScale(
          mGray,
          faces,
          1.1,
          2,
          2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
          new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
          new Size());

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
      Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
      // Center of the face box, annotated with a dot and its coordinates.
      xCenter = (facesArray[i].x + facesArray[i].width + facesArray[i].x) / 2;
      yCenter = (facesArray[i].y + facesArray[i].y + facesArray[i].height) / 2;
      Point center = new Point(xCenter, yCenter);

      Imgproc.circle(mRgba, center, 10, new Scalar(255, 0, 0, 255), 3);

      Imgproc.putText(
          mRgba,
          "[" + center.x + "," + center.y + "]",
          new Point(center.x + 20, center.y + 20),
          Core.FONT_HERSHEY_SIMPLEX,
          0.7,
          new Scalar(255, 255, 255, 255));

      Rect r = facesArray[i];
      // compute the eye area
      Rect eyearea =
          new Rect(
              r.x + r.width / 8,
              (int) (r.y + (r.height / 4.5)),
              r.width - 2 * r.width / 8,
              (int) (r.height / 3.0));
      // split it
      Rect eyearea_right =
          new Rect(
              r.x + r.width / 16,
              (int) (r.y + (r.height / 4.5)),
              (r.width - 2 * r.width / 16) / 2,
              (int) (r.height / 3.0));
      Rect eyearea_left =
          new Rect(
              r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
              (int) (r.y + (r.height / 4.5)),
              (r.width - 2 * r.width / 16) / 2,
              (int) (r.height / 3.0));
      // draw the area - mGray is working grayscale mat, if you want to
      // see area in rgb preview, change mGray to mRgba
      Imgproc.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(255, 0, 0, 255), 2);
      Imgproc.rectangle(
          mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(255, 0, 0, 255), 2);

      // Learn pupil templates from the first 5 frames, then switch to
      // template matching for tracking.
      if (learn_frames < 5) {
        teplateR = get_template(mJavaDetectorEye, eyearea_right, 24);
        teplateL = get_template(mJavaDetectorEye, eyearea_left, 24);
        learn_frames++;
      } else {
        // Learning finished, use the new templates for template
        // matching
        match_eye(eyearea_right, teplateR, method);
        match_eye(eyearea_left, teplateL, method);
      }

      // cut eye areas and put them to zoom windows
      Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
      Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
    }

    return mRgba;
  }