  /**
   * Find the most likely person based on a detection. Returns the index of the nearest training
   * face and stores the confidence value into pConfidencePointer.
   *
   * @param projectedTestFace the projected test face
   * @param pConfidencePointer a pointer that receives the confidence value
   * @return the index of the nearest training face
   */
  private int findNearestNeighbor(float[] projectedTestFace, FloatPointer pConfidencePointer) {
    double leastDistSq = Double.MAX_VALUE;
    int iNearest = 0;

    LOGGER.info("................");
    LOGGER.info("find nearest neighbor from " + nTrainFaces + " training faces");
    for (int iTrain = 0; iTrain < nTrainFaces; iTrain++) {
      double distSq = 0;
      for (int i = 0; i < nEigens; i++) {
        float projectedTrainFaceDistance = (float) projectedTrainFaceMat.get(iTrain, i);
        float d_i = projectedTestFace[i] - projectedTrainFaceDistance;
        distSq += d_i * d_i;
        // distSq += d_i * d_i / eigenValMat.data_fl().get(i); // Mahalanobis distance (might give
        // better results than Euclidean distance)
      }
      if (distSq < leastDistSq) {
        leastDistSq = distSq;
        iNearest = iTrain;
        LOGGER.info(
            "  training face "
                + (iTrain + 1)
                + " is the new best match, least squared distance: "
                + leastDistSq);
      }
    }

    // Derive the confidence level from the Euclidean distance, so that similar images give a
    // confidence between 0.5 and 1.0, and very different images give a confidence between
    // 0.0 and 0.5.
    float pConfidence =
        (float) (1.0f - Math.sqrt(leastDistSq / (float) (nTrainFaces * nEigens)) / 255.0f);
    pConfidencePointer.put(pConfidence);

    LOGGER.info(
        "training face " + (iNearest + 1) + " is the final best match, confidence " + pConfidence);
    return iNearest;
  }
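  // A minimal sketch, not part of the original class: the Mahalanobis variant hinted at in the
  // commented-out line inside findNearestNeighbor() weights each squared coefficient difference
  // by the corresponding eigenvalue. The helper name is hypothetical; it assumes eigenValMat
  // holds one non-zero eigenvalue per eigenface, as produced by learn().
  private double mahalanobisDistSq(float[] projectedTestFace, int iTrain) {
    double distSq = 0;
    for (int i = 0; i < nEigens; i++) {
      float d_i = projectedTestFace[i] - (float) projectedTrainFaceMat.get(iTrain, i);
      distSq += d_i * d_i / eigenValMat.data_fl().get(i); // weight by the i-th eigenvalue
    }
    return distSq;
  }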
  /** Recognize each normalized face image in a folder against the currently trained data. */
  public void recognize(String tmpDetectImageOutput, String trainedOutput, String personName) {
    File folder = new File(tmpDetectImageOutput);
    File[] listOfFiles = folder.listFiles();
    ArrayList<String> testFaceFileNames = new ArrayList<String>();
    String answer = "";

    // load the test images into the testFaces list
    for (int i = 0; i < listOfFiles.length; i++) {
      if (listOfFiles[i].isFile()) {
        String file = listOfFiles[i].getName();
        String filepath = tmpDetectImageOutput + "/" + file;
        IplImage tmpImage = cvLoadImage(filepath, CV_LOAD_IMAGE_GRAYSCALE);
        if (tmpImage != null) {
          testFaces.add(tmpImage);
          testFaceFileNames.add(filepath);
        }
      }
    }

    CvMat trainPersonNumMat = loadTrainingData(trainedOutput, personName);
    int nTestFaces = testFaces.size();
    LOGGER.info(trainedOutput + "/" + personName + ".xml");
    System.out.println("total: " + nTestFaces + " to be tested later...");

    personNumTruthMat = cvCreateMat(1, nTestFaces, CV_32SC1); // type, 32-bit signed, one channel
    // the ground-truth labels are not known at this point, so zero the matrix (mirrors learn())
    for (int j = 0; j < nTestFaces; j++) {
      personNumTruthMat.put(0, j, 0);
    }

    float[] projectedTestFace = new float[nEigens];
    float confidence = 0.0f;
    int nCorrect = 0;
    int nWrong = 0;
    double timeFaceRecognizeStart = (double) cvGetTickCount(); // recognition start time (currently unused)

    for (int i = 0; i < nTestFaces; i++) {
      int iNearest;
      int nearest;
      int truth;

      // project the test image onto the PCA subspace
      LOGGER.info("before cvEigenDecomposite...");
      cvEigenDecomposite(
          testFaces.get(i), // obj
          nEigens, // nEigObjs
          eigenVectArr, // eigInput (Pointer)
          0, // ioFlags
          null, // userData
          pAvgTrainImg, // avg
          projectedTestFace); // coeffs

      final FloatPointer pConfidence = new FloatPointer(confidence);
      LOGGER.info("before findNearestNeighbor...");
      iNearest = findNearestNeighbor(projectedTestFace, pConfidence);
      confidence = pConfidence.get();
      truth = personNumTruthMat.data_i().get(i);
      nearest = trainPersonNumMat.data_i().get(iNearest);

      if (nearest == truth) {
        answer = "Correct";
        nCorrect++;
      } else {
        answer = "WRONG!";
        nWrong++;
      }
      LOGGER.info(testFaceFileNames.get(i));
      LOGGER.info(
          "nearest = "
              + nearest
              + ", Truth = "
              + truth
              + " ("
              + answer
              + "). Confidence = "
              + confidence);
    }
  }
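  // Illustrative usage sketch, not part of the original API; the folder paths and person name
  // below are placeholder assumptions. recognize() depends on the eigenface state (nEigens,
  // eigenVectArr, pAvgTrainImg, projectedTrainFaceMat) being populated, here by running learn()
  // first in the same session; it also expects loadTrainingData() to find the stored model at
  // trainedOutput + "/" + personName + ".xml".
  public void learnThenRecognizeExample() {
    learn("data/train-faces"); // hypothetical folder of normalized training images
    recognize("data/test-faces", "data/trained", "alice"); // hypothetical test folder, model folder, person
  }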
  /** Run PCA (eigenface) training on all normalized images in the given folder. */
  public void learn(String tmpImageDataOutput) {
    File folder = new File(tmpImageDataOutput);
    File[] listOfFiles = folder.listFiles();

    // load the images into trainingFaces
    for (int i = 0; i < listOfFiles.length; i++) {
      if (listOfFiles[i].isFile()) {
        String file = listOfFiles[i].getName();
        String filepath = tmpImageDataOutput + "/" + file;
        IplImage tmpImage = cvLoadImage(filepath, CV_LOAD_IMAGE_GRAYSCALE);
        if (tmpImage != null) {
          trainingFaces.add(tmpImage);
        }
      }
    }

    int nfaces = trainingFaces.size();
    System.out.println("total: " + nfaces);

    // Do the Principal Component Analysis (PCA): find the average image and the eigenfaces that
    // represent any image in the given dataset.

    // set the number of eigenvalues to use
    nTrainFaces = nfaces;
    nEigens = nTrainFaces - 1;

    CvTermCriteria calcLimit;
    CvSize faceImgSize = new CvSize();
    faceImgSize.width(trainingFaces.get(0).width());
    faceImgSize.height(trainingFaces.get(0).height());

    personNumTruthMat =
        cvCreateMat(
            1, // rows
            nfaces, // cols
            CV_32SC1); // type, 32-bit signed, one channel

    // initialize the person number matrix - for ease of debugging
    for (int j1 = 0; j1 < nfaces; j1++) {
      personNumTruthMat.put(0, j1, 0);
    }

    for (int i = 0; i < nEigens; i++) {
      eigens.add(
          cvCreateImage(
              faceImgSize, // size
              IPL_DEPTH_32F, // depth
              1)); // channels
    }

    eigenValMat = cvCreateMat(1, nEigens, CV_32FC1); // type, 32-bit float, 1 channel

    // allocate the averaged image
    pAvgTrainImg =
        cvCreateImage(
            faceImgSize, // size
            IPL_DEPTH_32F, // depth
            1); // channels

    // set the PCA termination criterion
    calcLimit =
        cvTermCriteria(
            CV_TERMCRIT_ITER, // type
            nEigens, // max_iter
            1); // epsilon

    LOGGER.info("computing average image, eigenvalues and eigenvectors");
    // compute average image, eigenvalues, and eigenvectors
    eigenVectArr = eigens.toArray(new IplImage[eigens.size()]);
    cvCalcEigenObjects(
        nTrainFaces, // nObjects
        trainingFaces.toArray(new IplImage[trainingFaces.size()]), // input
        eigenVectArr, // output array; transfer back to the eigens list later if needed
        CV_EIGOBJ_NO_CALLBACK, // ioFlags
        0, // ioBufSize
        null, // userData
        calcLimit,
        pAvgTrainImg, // avg
        eigenValMat.data_fl()); // eigVals
    // eigens = (ArrayList) Arrays.asList(eigenVectArr);

    LOGGER.info("normalizing the eigenvalues");
    cvNormalize(
        eigenValMat, // src (CvArr)
        eigenValMat, // dst (CvArr)
        1, // a
        0, // b
        CV_L1, // norm_type
        null); // mask

    LOGGER.info("projecting the training images onto the PCA subspace");
    // project the training images onto the PCA subspace
    projectedTrainFaceMat =
        cvCreateMat(
            nTrainFaces, // rows
            nEigens, // cols
            CV_32FC1); // type, 32-bit float, 1 channel

    // initialize the training face matrix - for ease of debugging
    for (int i1 = 0; i1 < nTrainFaces; i1++) {
      for (int j1 = 0; j1 < nEigens; j1++) {
        projectedTrainFaceMat.put(i1, j1, 0.0);
      }
    }

    LOGGER.info(
        "created projectedTrainFaceMat with "
            + nTrainFaces
            + " (nTrainFaces) rows and "
            + nEigens
            + " (nEigens) columns");
    // nTrainFaces should always be > 5
    // if (nTrainFaces < 5) {
    //   LOGGER.info("projectedTrainFaceMat contents:\n" + oneChannelCvMatToString(projectedTrainFaceMat));
    // }

    final FloatPointer floatPointer = new FloatPointer(nEigens);
    for (int i = 0; i < nTrainFaces; i++) {
      cvEigenDecomposite(
          trainingFaces.get(i), // obj
          nEigens, // nEigObjs
          eigenVectArr, // eigInput (Pointer)
          0, // ioFlags
          null, // userData (Pointer)
          pAvgTrainImg, // avg
          floatPointer); // coeffs (FloatPointer)
      for (int j1 = 0; j1 < nEigens; j1++) {
        projectedTrainFaceMat.put(i, j1, floatPointer.get(j1));
      }
    }

    // store the recognition data as an xml file
    // storeTrainingData();

    // Save all the eigenvectors as images, so that they can be checked.
    // storeEigenfaceImages();
  }
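  // A minimal sketch of the storeTrainingData() step that learn() leaves commented out, using the
  // standard OpenCV 1.x C-API persistence calls (cvOpenFileStorage, cvWriteInt, cvWrite) and
  // assuming they are statically imported like the other cv* calls in this file. The output file
  // name and XML node names are assumptions, not the original implementation; loadTrainingData()
  // would need to read back the same node names.
  private void storeTrainingDataSketch(String trainedOutput, String personName) {
    CvFileStorage fileStorage =
        cvOpenFileStorage(trainedOutput + "/" + personName + ".xml", null, CV_STORAGE_WRITE);
    cvWriteInt(fileStorage, "nEigens", nEigens);
    cvWriteInt(fileStorage, "nTrainFaces", nTrainFaces);
    cvWrite(fileStorage, "trainPersonNumMat", personNumTruthMat, cvAttrList());
    cvWrite(fileStorage, "eigenValMat", eigenValMat, cvAttrList());
    cvWrite(fileStorage, "projectedTrainFaceMat", projectedTrainFaceMat, cvAttrList());
    cvWrite(fileStorage, "avgTrainImg", pAvgTrainImg, cvAttrList());
    for (int i = 0; i < nEigens; i++) {
      cvWrite(fileStorage, "eigenVect_" + i, eigenVectArr[i], cvAttrList());
    }
    cvReleaseFileStorage(fileStorage);
  }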