protected static void rotateXAxis(Mat rotation) { // get the matrix corresponding to the rotation vector Mat R = new Mat(3, 3, CvType.CV_64FC1); Calib3d.Rodrigues(rotation, R); // create the matrix to rotate 90º around the X axis // 1, 0, 0 // 0 cos -sin // 0 sin cos double[] rot = { 1, 0, 0, 0, 0, -1, 0, 1, 0 }; // multiply both matrix Mat res = new Mat(3, 3, CvType.CV_64FC1); double[] prod = new double[9]; double[] a = new double[9]; R.get(0, 0, a); for (int i = 0; i < 3; i++) for (int j = 0; j < 3; j++) { prod[3 * i + j] = 0; for (int k = 0; k < 3; k++) { prod[3 * i + j] += a[3 * i + k] * rot[3 * k + j]; } } // convert the matrix to a vector with rodrigues back res.put(0, 0, prod); Calib3d.Rodrigues(res, rotation); }
public static void draw3dAxis( Mat frame, CameraParameters cp, Scalar color, double height, Mat Rvec, Mat Tvec) { // Mat objectPoints = new Mat(4,3,CvType.CV_32FC1); MatOfPoint3f objectPoints = new MatOfPoint3f(); Vector<Point3> points = new Vector<Point3>(); points.add(new Point3(0, 0, 0)); points.add(new Point3(height, 0, 0)); points.add(new Point3(0, height, 0)); points.add(new Point3(0, 0, height)); objectPoints.fromList(points); MatOfPoint2f imagePoints = new MatOfPoint2f(); Calib3d.projectPoints( objectPoints, Rvec, Tvec, cp.getCameraMatrix(), cp.getDistCoeff(), imagePoints); List<Point> pts = new Vector<Point>(); Converters.Mat_to_vector_Point(imagePoints, pts); Core.line(frame, pts.get(0), pts.get(1), color, 2); Core.line(frame, pts.get(0), pts.get(2), color, 2); Core.line(frame, pts.get(0), pts.get(3), color, 2); Core.putText(frame, "X", pts.get(1), Core.FONT_HERSHEY_SIMPLEX, 0.5, color, 2); Core.putText(frame, "Y", pts.get(2), Core.FONT_HERSHEY_SIMPLEX, 0.5, color, 2); Core.putText(frame, "Z", pts.get(3), Core.FONT_HERSHEY_SIMPLEX, 0.5, color, 2); }
protected static void glGetModelViewMatrix(double[] modelview_matrix, Mat Rvec, Mat Tvec) throws ExtParamException { // check if parameters are valid boolean invalid = false; double[] tvec = new double[3]; double[] rvec = new double[3]; Rvec.get(0, 0, rvec); Tvec.get(0, 0, tvec); for (int i = 0; i < 3 && !invalid; i++) { if (tvec[i] != -999999) invalid |= false; if (rvec[i] != -999999) invalid |= false; } if (invalid) throw new ExtParamException("extrinsic parameters are not set Marker.getModelViewMatrix"); Mat Rot = new Mat(3, 3, CvType.CV_32FC1); Mat Jacob = new Mat(); Calib3d.Rodrigues(Rvec, Rot, Jacob); // TODO jacob no se vuelve a usar double[][] para = new double[3][4]; double[] rotvec = new double[9]; Rot.get(0, 0, rotvec); for (int i = 0; i < 3; i++) for (int j = 0; j < 3; j++) para[i][j] = rotvec[3 * i + j]; // now, add the translation para[0][3] = tvec[0]; para[1][3] = tvec[1]; para[2][3] = tvec[2]; double scale = 1; // R1C2 modelview_matrix[0 + 0 * 4] = para[0][0]; modelview_matrix[0 + 1 * 4] = para[0][1]; modelview_matrix[0 + 2 * 4] = para[0][2]; modelview_matrix[0 + 3 * 4] = para[0][3]; // R2 modelview_matrix[1 + 0 * 4] = para[1][0]; modelview_matrix[1 + 1 * 4] = para[1][1]; modelview_matrix[1 + 2 * 4] = para[1][2]; modelview_matrix[1 + 3 * 4] = para[1][3]; // R3 modelview_matrix[2 + 0 * 4] = -para[2][0]; modelview_matrix[2 + 1 * 4] = -para[2][1]; modelview_matrix[2 + 2 * 4] = -para[2][2]; modelview_matrix[2 + 3 * 4] = -para[2][3]; modelview_matrix[3 + 0 * 4] = 0.0f; modelview_matrix[3 + 1 * 4] = 0.0f; modelview_matrix[3 + 2 * 4] = 0.0f; modelview_matrix[3 + 3 * 4] = 1.0f; if (scale != 0.0) { modelview_matrix[12] *= scale; modelview_matrix[13] *= scale; modelview_matrix[14] *= scale; } // rotate 90º around the x axis // rotating around x axis in OpenGL is equivalent to // multiply the model matrix by the matrix: // 1, 0, 0, 0, 0, cos(a), sin(a), 0, 0, -sin(a), cos(a), 0, 0, 0, 0, 1 // double[] auxRotMat = new double[]{ // 1, 0, 0, 0, // 0, 0, 1, 0, // 
0, -1, 0, 0, // 0, 0, 0, 1 // }; // Utils.matrixProduct(modelview_matrix1, auxRotMat, modelview_matrix); }
/**
 * Decodes the captured JPEG held in {@code mData}, searches it for the asymmetric
 * circles calibration pattern, and — once enough new entries have accumulated —
 * runs a full camera calibration and stores the result in {@code calibrationEntries}.
 *
 * @param context Android context (currently unused in the visible body)
 * @param calibrationEntries accumulator for detected patterns and calibration output
 * @return true only when a calibration was actually computed and stored
 */
boolean decodeJPEGAndAnalyze(Context context, CalibrationEntries calibrationEntries) {
    boolean result = false;

    // Favor decode quality over speed: blob detection benefits from clean pixels.
    BitmapFactory.Options opt = new BitmapFactory.Options();
    opt.inPreferQualityOverSpeed = true;
    BitmapRegionDecoder brd;
    byte[] data = mData;
    try {
        brd = BitmapRegionDecoder.newInstance(data, 0, data.length, false);
        // Decode the full image region.
        Rect rect;
        rect = new Rect(0, 0, brd.getWidth(), brd.getHeight());
        Bitmap bitmap = brd.decodeRegion(rect, opt);
        // Pin the bitmap into an OpenCV Mat (must be unpinned before leaving the try).
        MatBitmapHolder mRgbaMatHolder = new MatBitmapHolder(bitmap);
        Mat rgbaMat = mRgbaMatHolder.pin();
        Mat grayData = new Mat();
        Imgproc.cvtColor(rgbaMat, grayData, Imgproc.COLOR_RGB2GRAY, 1);
        Mat centersCalibCircles = new Mat();

        Size patternSize = CalibrationEntries.Pattern_ASYMMETRIC_CIRCLES_GRID_SIZE;
        Size imageSize = new Size(grayData.cols(), grayData.rows());
        // Look for the asymmetric circle grid; centersCalibCircles receives the blob centers.
        boolean patternWasFound = Calib3d.findCirclesGridDefault(
                grayData, patternSize, centersCalibCircles, Calib3d.CALIB_CB_ASYMMETRIC_GRID);

        // Only record entries that have an associated device orientation.
        if (patternWasFound && this.orientation != null) {
            int addedIdx = calibrationEntries.addEntry(this.orientation, centersCalibCircles);
            if (addedIdx >= 0) {
                Log.d(
                        "CALIB",
                        String.format(
                                "PictureCapture: Added calibration entry at %d tot: %d",
                                addedIdx, calibrationEntries.getNumEntries()));
                // Re-calibrate only after more than 5 fresh entries since the last run.
                if (calibrationEntries.getNewlyAdded() > 5) {
                    List<Mat> imagePoints = calibrationEntries.getPoints();
                    List<Mat> objectPoints =
                            calibrationEntries.getObjectPointsAsymmentricList(imagePoints.size());
                    if (CalibrationEntries.isEnoughCalibrationPoints(imagePoints.size())) {
                        calibrationEntries.resetNewlyAdded();
                        CameraCalibrationData cameraCalibrationData = new CameraCalibrationData();
                        List<Mat> rvecs = new Vector<Mat>(imagePoints.size());
                        List<Mat> tvecs = new Vector<Mat>(imagePoints.size());
                        // Fix the higher-order distortion terms; five coefficients are enough here.
                        int flags = 0;
                        flags |= Calib3d.CALIB_FIX_K4 | Calib3d.CALIB_FIX_K5;
                        Log.d("CALIB", String.format("PictureCapture: Calling Calib3d.calibrateCamera"));
                        Mat K = new Mat();
                        Mat kdist = new Mat();
                        // rms is the overall re-projection error of the calibration.
                        double rms = Calib3d.calibrateCamera(
                                objectPoints, imagePoints, imageSize, K, kdist,
                                rvecs, tvecs, flags);
                        // K is 3x3 (9 values); kdist holds 5 distortion coefficients.
                        double[] Karray = new double[9];
                        double[] distcoeffs_array = new double[5];
                        K.get(0, 0, Karray);
                        kdist.get(0, 0, distcoeffs_array);
                        cameraCalibrationData.setData(
                                grayData.cols(), grayData.rows(), Karray, distcoeffs_array, rms);
                        Log.d(
                                "CALIB",
                                String.format(
                                        "PictureCapture: Calibration data: %s",
                                        cameraCalibrationData.formatCalibrationDataString()));
                        calibrationEntries.setCalibrationData(cameraCalibrationData);
                        result = true;
                    }
                }
            }
        }

        // Leftover from an earlier stereo split; the rect is computed but unused.
        // mRightBitmap = brd.decodeRegion(rect, opt);
        rect = new Rect(brd.getWidth() - brd.getWidth() / 3, 0, brd.getWidth(), brd.getHeight());
        // mLeftBitmap = brd.decodeRegion(rect, opt);
        mRgbaMatHolder.unpin(rgbaMat);
    } catch (IOException e) {
        XLog.e("Region decoder doesn't want to cooperate", e);
    }
    return result;
}
/**
 * Analyze video frames using a computer-vision approach and generate an
 * {@code ArrayList<AttitudeRec>}: for each frame in which the asymmetric circle grid is
 * found, solvePnP recovers the camera pose, and poses whose re-projection error is
 * acceptable are appended to {@code recs} as (timestamp, roll/pitch/yaw) records.
 *
 * @param recs output ArrayList of AttitudeRec
 * @return total number of frames in the (decimated) video, or -1 if the decoded frame
 *         size disagrees with the recorded metadata
 */
private int analyzeVideo(ArrayList<AttitudeRec> recs) {
    VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

    // Decimate high-fps recordings down toward DECIMATION_FPS_TARGET and adjust the
    // effective fps accordingly (timestamps below are computed as i / meta.fps).
    int decimation = 1;
    if (meta.fps > DECIMATION_FPS_TARGET) {
        decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
        meta.fps /= decimation;
    }

    VideoDecoderForOpenCV videoDecoder =
            new VideoDecoderForOpenCV(
                    new File(mPath, "video.mp4"), decimation); // every 3 frame process 1 frame

    Mat frame;
    Mat gray = new Mat();
    int i = -1;

    Size frameSize = videoDecoder.getSize();

    if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
        // this is very unlikely
        return -1;
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.startMethodTracing("cvprocess");
    }

    // 4x11 asymmetric circle grid target.
    Size patternSize = new Size(4, 11);
    // Focal length in pixels from the recorded horizontal field of view.
    float fc = (float) (meta.frameWidth / 2.0 / Math.tan(meta.fovWidth / 2.0));
    // Principal point assumed at the image center; no distortion is modeled.
    Mat camMat = cameraMatrix(fc, new Size(frameSize.width / 2, frameSize.height / 2));
    MatOfDouble coeff = new MatOfDouble(); // dummy

    MatOfPoint2f centers = new MatOfPoint2f();
    MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
    Mat rvec = new MatOfFloat();
    Mat tvec = new MatOfFloat();

    MatOfPoint2f reprojCenters = new MatOfPoint2f();

    if (LOCAL_LOGV) {
        Log.v(TAG, "Camera Mat = \n" + camMat.dump());
    }

    long startTime = System.nanoTime();

    while ((frame = videoDecoder.getFrame()) != null) {
        if (LOCAL_LOGV) {
            Log.v(TAG, "got a frame " + i);
        }

        // has to be in front, as there are cases where execution
        // will skip the later part of this while
        i++;

        // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
        Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

        boolean foundPattern =
                Calib3d.findCirclesGridDefault(
                        gray, patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);

        if (!foundPattern) {
            // skip to next frame
            continue;
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
        }

        // figure out the extrinsic parameters using real ground truth 3D points and the pixel
        // position of blobs found in findCircleGrid, an estimated camera matrix and
        // no-distortion are assumed.
        boolean foundSolution =
                Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec, false,
                        Calib3d.CV_ITERATIVE);

        if (!foundSolution) {
            // skip to next frame
            if (LOCAL_LOGV) {
                Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
            }
            continue;
        }

        // reproject points to for evaluation of result accuracy of solvePnP
        Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

        // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
        double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

        if (LOCAL_LOGV) {
            Log.v(TAG, "Found attitude, re-projection error = " + error);
        }

        // if error is reasonable, add it into the results
        if (error < REPROJECTION_THREASHOLD) {
            double[] rv = new double[3];
            rvec.get(0, 0, rv);
            recs.add(new AttitudeRec((double) i / meta.fps, rodr2rpy(rv)));
        }

        if (OUTPUT_DEBUG_IMAGE) {
            Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
            Highgui.imwrite(
                    Environment.getExternalStorageDirectory().getPath()
                            + "/RVCVRecData/DebugCV/img" + i + ".png",
                    frame);
        }
    }
    if (LOCAL_LOGV) {
        Log.v(TAG, "Finished decoding");
    }

    if (TRACE_VIDEO_ANALYSIS) {
        Debug.stopMethodTracing();
    }

    if (LOCAL_LOGV) {
        // time analysis
        double totalTime = (System.nanoTime() - startTime) / 1e9;
        Log.i(TAG, "Total time: " + totalTime + "s, Per frame time: " + totalTime / i);
    }
    return i;
}
private void detectObject() { readLock.lock(); featureDetector.detect(img_scene, keypoints_scene); featureDetector.detect(img_object, keypoints_object); extractor.compute(img_object, keypoints_object, descriptors_object); extractor.compute(img_scene, keypoints_scene, descriptors_scene); readLock.unlock(); if (!descriptors_scene.empty()) { matcher.match(descriptors_object, descriptors_scene, matches); // readLock.unlock(); // listMatches = matches.toList(); int size = descriptors_object.rows(); // -- Quick calculation of max and min distances between keypoints for (int i = 0; i < size; i++) { double dist = listMatches.get(i).distance; if (dist < min_dist) { min_dist = dist; } } Log.e("Min", min_dist + ""); threeMinDist = 3 * min_dist; listGoodMatches.removeAll(listGoodMatches); for (int i = 0; i < size; i++) { DMatch dMatch = listMatches.get(i); float distance = dMatch.distance; if (distance < threeMinDist) { listGoodMatches.add(dMatch); } } // good_matches.fromList(listGoodMatches); Log.e("Matches", listMatches.size() + ""); Log.e("Good Matches", listGoodMatches.size() + ""); // if (listGoodMatches.size() > 4) { Point pointObj[] = new Point[listGoodMatches.size()]; Point pointScene[] = new Point[listGoodMatches.size()]; listKeyPointObject = keypoints_object.toList(); listKeyPointScene = keypoints_scene.toList(); // listPointScene.removeAll(listPointScene); for (int i = 0; i < listGoodMatches.size(); i++) { // -- Get the keypoints from the good matches pointObj[i] = listKeyPointObject.get(listGoodMatches.get(i).queryIdx).pt; pointScene[i] = listKeyPointScene.get(listGoodMatches.get(i).trainIdx).pt; // listPointScene.add(listKeyPointScene.get(listGoodMatches.get(i).trainIdx).pt); } obj.fromArray(pointObj); scene.fromArray(pointScene); Log.e("Before findHomography", ""); H = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, 9); Log.e("AFTERRR findHomography", ""); pointObjConners[0] = new Point(0, 0); pointObjConners[1] = new Point(img_object.cols(), 0); 
pointObjConners[2] = new Point(img_object.cols(), img_object.rows()); pointObjConners[3] = new Point(0, img_object.rows()); obj_corners.fromArray(pointObjConners); Core.perspectiveTransform(obj_corners, scene_corners, H); p0 = new Point(scene_corners.toList().get(0).x, scene_corners.toList().get(0).y + 0); p1 = new Point(scene_corners.toList().get(1).x, scene_corners.toList().get(1).y + 0); p2 = new Point(scene_corners.toList().get(2).x, scene_corners.toList().get(2).y + 0); p3 = new Point(scene_corners.toList().get(3).x, scene_corners.toList().get(3).y + 0); Log.e("POINT THREAD", p0.toString() + p1.toString() + p2.toString() + p3.toString()); Log.e("detect ok", "detect ok"); } } else { Log.e("No descritor", "No descritor"); // readLock.unlock(); } }