/**
 * Detects faces in {@code src} using the Haar cascade loaded from {@code XML_FILE},
 * draws a red rectangle around each detection, then shows the annotated image in a
 * "Result" window, saves it to disk and blocks until a key is pressed.
 *
 * @param src BGR input image; annotated in place
 */
public static void detect(IplImage src) {
  CvHaarClassifierCascade cascade = new CvHaarClassifierCascade(cvLoad(XML_FILE));
  CvMemStorage storage = CvMemStorage.create();
  // Fix: reset the storage BEFORE detection. The original cleared it after
  // cvHaarDetectObjects but before reading the CvSeq — cvClearMemStorage resets
  // the storage that the returned sequence lives in, invalidating its data.
  cvClearMemStorage(storage);
  CvSeq sign = cvHaarDetectObjects(src, cascade, storage, 1.5, 3, CV_HAAR_DO_CANNY_PRUNING);
  int totalFaces = sign.total();
  for (int i = 0; i < totalFaces; i++) {
    CvRect r = new CvRect(cvGetSeqElem(sign, i));
    cvRectangle(
        src,
        cvPoint(r.x(), r.y()),
        cvPoint(r.x() + r.width(), r.y() + r.height()),
        CvScalar.RED,
        2,
        CV_AA,
        0);
  }
  cvShowImage("Result", src);
  // NOTE(review): hard-coded output path — consider passing it in as a parameter.
  cvSaveImage("D:\\asd\\a.jpg", src);
  cvWaitKey(0);
  // Fix: release the native detection storage (the original leaked it).
  cvReleaseMemStorage(storage);
}
/**
 * Receives a frame and runs Haar-cascade face detection on it, collecting the
 * position and size of every detected face.
 *
 * @param originalImage BGR input frame; not modified
 * @return list with one 4-element array per detected face, ordered
 *     {@code [x, y, height, width]} (note: height before width — kept for caller
 *     compatibility); empty list when no face is found
 */
public static List<Integer[]> detect(IplImage originalImage) {
  List<Integer[]> facesList = new ArrayList<Integer[]>();
  // Detection runs on a single-channel grayscale copy of the frame.
  IplImage grayImage =
      IplImage.create(originalImage.width(), originalImage.height(), IPL_DEPTH_8U, 1);
  cvCvtColor(originalImage, grayImage, CV_BGR2GRAY);
  CvMemStorage storage = CvMemStorage.create();
  opencv_objdetect.CvHaarClassifierCascade cascade =
      new opencv_objdetect.CvHaarClassifierCascade(cvLoad(CASCADE_FILE));
  CvSeq faces = cvHaarDetectObjects(grayImage, cascade, storage, 1.1, 1, 0);
  // Hoisted: total() is a native call; no need to repeat it per iteration.
  int total = faces.total();
  for (int i = 0; i < total; i++) {
    CvRect r = new CvRect(cvGetSeqElem(faces, i));
    Integer[] coordinates = new Integer[4];
    coordinates[0] = r.x();
    coordinates[1] = r.y();
    coordinates[2] = r.height();
    coordinates[3] = r.width();
    facesList.add(coordinates);
  }
  // Fix: release native memory — the original leaked grayImage and storage on
  // every frame processed.
  cvReleaseMemStorage(storage);
  grayImage.release();
  return facesList;
}
public void generatePGMFromPic(String srcPath, String file, String destPath) throws Exception { String srcFilePath = srcPath + "/" + file; System.out.println("Loading image from " + srcFilePath); IplImage origImg = cvLoadImage(srcFilePath); // convert to grayscale IplImage grayImg = IplImage.create(origImg.width(), origImg.height(), IPL_DEPTH_8U, 1); cvCvtColor(origImg, grayImg, CV_BGR2GRAY); // scale the grayscale (to speed up face detection) IplImage smallImg = IplImage.create(grayImg.width() / SCALE, grayImg.height() / SCALE, IPL_DEPTH_8U, 1); cvResize(grayImg, smallImg, CV_INTER_LINEAR); // equalize the small grayscale IplImage equImg = IplImage.create(smallImg.width(), smallImg.height(), IPL_DEPTH_8U, 1); cvEqualizeHist(smallImg, equImg); CvMemStorage storage = CvMemStorage.create(); CvHaarClassifierCascade cascade = new CvHaarClassifierCascade(cvLoad(CASCADE_FILE)); System.out.println("Detecting faces..."); CvSeq faces = cvHaarDetectObjects(equImg, cascade, storage, 1.1, 3, CV_HAAR_DO_CANNY_PRUNING); cvClearMemStorage(storage); int total = faces.total(); System.out.println("Found " + total + " face(s)"); for (int i = 0; i < total; i++) { CvRect r = new CvRect(cvGetSeqElem(faces, i)); cvSetImageROI( origImg, cvRect(r.x() * SCALE, r.y() * SCALE, r.width() * SCALE, r.height() * SCALE)); IplImage origface = cvCreateImage(cvSize(r.width() * SCALE, r.height() * SCALE), 8, 3); IplImage smallface = cvCreateImage(cvSize(120, 120), 8, 3); cvCopy(origImg, origface); cvResize(origface, smallface, CV_INTER_LINEAR); cvSaveImage(destPath + "/" + file + i + ".pgm", smallface); cvResetImageROI(origImg); } }
/**
 * Runs Haar-cascade face detection on {@code image}, records the largest detected
 * face's rectangle in the static {@code FaceScanner.x/y/width/height} fields, and
 * optionally draws green rectangles around all detections when
 * {@code FaceScanner.displayRects} is set.
 *
 * @param image BGR input image; may be annotated in place when displayRects is set
 * @return the (possibly annotated) input image when a sufficiently large face was
 *     found, or {@code null} when no face passed the size check
 * @throws Exception if color conversion or detection fails
 */
public IplImage DetectFaces(IplImage image) throws Exception {
  // Converts the image to gray scale for detection to work, using the same dimensions as the
  // original.
  IplImage grayImage = IplImage.createFrom(convertColorToGray(image.getBufferedImage()));
  CvMemStorage storage = CvMemStorage.create();
  // Using the cascade file, this creates a classification for what objects to detect. In our case
  // it is the anterior of the face.
  CvHaarClassifierCascade classifier = new CvHaarClassifierCascade(cvLoad(CASCADE_FILE));
  // Detect Haar-like objects, depending on the classifier. In this case we use a classifier for
  // detecting the anterior of the face.
  CvSeq faces = cvHaarDetectObjects(grayImage, classifier, storage, 1.1, 1, 0);
  // Initialize the static variables in FaceScanner for determining the area to crop the largest
  // detected face. NOTE(review): results are communicated through static mutable
  // state, so this method is not safe to call from multiple threads concurrently.
  FaceScanner.height = 0;
  FaceScanner.width = 0;
  FaceScanner.x = 0;
  FaceScanner.y = 0;
  // Loop through all detected faces and save the largest (closest) face.
  // The "largest" comparison uses width only; height/x/y are taken from the same hit.
  for (int i = 0; i < faces.total(); i++) {
    CvRect rect = new CvRect(cvGetSeqElem(faces, i));
    if (FaceScanner.width < rect.width()) {
      FaceScanner.width = rect.width();
      FaceScanner.height = rect.height();
      FaceScanner.x = rect.x();
      FaceScanner.y = rect.y();
    }
    if (FaceScanner.displayRects) {
      /*Uncomment to draw the rectangles around the detected faces.*/
      // if(rect.width() > 130 && rect.height() > 130){
      // Draw a square around the detected face.
      cvRectangle(
          image,
          cvPoint(rect.x(), rect.y()),
          cvPoint(rect.x() + rect.width(), rect.y() + rect.height()),
          CvScalar.GREEN,
          2,
          CV_AA,
          0);
      // }
      /*-----------------------------------------------------------*/
    }
  }
  // Checks that there was a detected face in the image before saving. Also, the detected "face"
  // must be large enough to be considered
  // a detected face. This is to limit the amount of erroneous detections. This saves the full
  // size image with detections drawn on
  // whole image before cropping.
  // NOTE(review): the second clause accepts the face when EITHER dimension is >= 130
  // (it rejects only when both are < 130); if both dimensions must be >= 130,
  // the condition should be !(height < 130 || width < 130) — confirm intent.
  if (!(FaceScanner.height == 0 && FaceScanner.width == 0)
      && !(FaceScanner.height < 130 && FaceScanner.width < 130)) {
    // Save the image with rectangles.
    // cvSaveImage(filename.replace(".png", "-Rect.png"), image);
  } else {
    return null;
  }
  return image;
}