/**
 * Give it a grid and see if it computed a legitimate homography
 */
@Test
public void basicTest() {
	// create a grid and apply an arbitrary transform to it
	PlanarCalibrationTarget config = GenericCalibrationGrid.createStandardConfig();

	DenseMatrix64F R = RotationMatrixGenerator.eulerXYZ(0.02, -0.05, 0.01, null);
	Vector3D_F64 T = new Vector3D_F64(0, 0, -1000);
	Se3_F64 motion = new Se3_F64(R, T);

	List<Point2D_F64> observations = GenericCalibrationGrid.observations(motion, config);

	// compute the homography
	Zhang99ComputeTargetHomography alg = new Zhang99ComputeTargetHomography(config);

	assertTrue(alg.computeHomography(observations));

	DenseMatrix64F H = alg.getHomography();

	// test the homography property: x2 = H*x1
	List<Point2D_F64> gridPoints = config.points;
	for (int i = 0; i < observations.size(); i++) {
		Point2D_F64 a = GeometryMath_F64.mult(H, gridPoints.get(i), new Point2D_F64());

		double diff = a.distance(observations.get(i));
		assertEquals(0, diff, 1e-8);
	}
}
/**
 * Renders a 3D point in the left and right camera views given the stereo parameters. Lens
 * distortion is taken into account.
 *
 * @param param Stereo parameters
 * @param X Point location in 3D space
 * @param left Output: location in pixels in the left camera
 * @param right Output: location in pixels in the right camera
 */
public static void renderPointPixel( StereoParameters param, Point3D_F64 X,
									  Point2D_F64 left, Point2D_F64 right ) {
	// compute the location of X in the right camera's reference frame
	Point3D_F64 rightX = new Point3D_F64();
	SePointOps_F64.transform(param.getRightToLeft().invert(null), X, rightX);

	// location of the point in normalized image coordinates
	Point2D_F64 normLeft = new Point2D_F64(X.x / X.z, X.y / X.z);
	Point2D_F64 normRight = new Point2D_F64(rightX.x / rightX.z, rightX.y / rightX.z);

	// convert into pixel coordinates
	Point2D_F64 pixelLeft = PerspectiveOps.convertNormToPixel(param.left, normLeft.x, normLeft.y, null);
	Point2D_F64 pixelRight = PerspectiveOps.convertNormToPixel(param.right, normRight.x, normRight.y, null);

	// take lens distortion into account
	Point2Transform2_F32 distLeft = LensDistortionOps.transformPoint(param.left).distort_F32(true, true);
	Point2Transform2_F32 distRight = LensDistortionOps.transformPoint(param.right).distort_F32(true, true);

	Point2D_F32 lensLeft = new Point2D_F32();
	Point2D_F32 lensRight = new Point2D_F32();

	distLeft.compute((float) pixelLeft.x, (float) pixelLeft.y, lensLeft);
	distRight.compute((float) pixelRight.x, (float) pixelRight.y, lensRight);

	// output solution
	left.set(lensLeft.x, lensLeft.y);
	right.set(lensRight.x, lensRight.y);
}
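For reference, the following is a minimal standalone sketch of the normalized-to-pixel conversion that the convertNormToPixel step above performs for a pinhole camera model. The intrinsic values, the 3D point, and the class name PinholeProjectionSketch are illustrative assumptions, not taken from any real calibration or from the library itself.

// Minimal pinhole sketch: maps normalized image coordinates (x/z, y/z) into pixel
// coordinates using illustrative intrinsic parameters. This mirrors the
// convertNormToPixel step above but is not the library implementation.
public class PinholeProjectionSketch {
	public static void main(String[] args) {
		// illustrative intrinsics (assumed values, not a real calibration)
		double fx = 500, fy = 500, skew = 0, cx = 320, cy = 240;

		// normalized coordinates of a point at X = (0.1, -0.05, 2.0)
		double normX = 0.1 / 2.0;
		double normY = -0.05 / 2.0;

		// pinhole projection: pixel = K * [normX, normY, 1]^T
		double pixelX = fx * normX + skew * normY + cx;
		double pixelY = fy * normY + cy;

		System.out.printf("pixel = (%.2f, %.2f)%n", pixelX, pixelY);
	}
}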
public List<List<Point2D_F64>> getEdges() {
	// apply the stored scale factor to every edge point once, then mark it as applied
	if (this.scale != 1.0) {
		for (List<Point2D_F64> l : prunedCannyEdgeList) {
			for (Point2D_F64 p : l) {
				p.x *= this.scale;
				p.y *= this.scale;
			}
		}
		this.scale = 1.0;
	}
	return prunedCannyEdgeList;
}
private void extractImageFeatures( MultiSpectral<T> color, T gray,
								   FastQueue<TupleDesc> descs, List<Point2D_F64> locs ) {
	// detect interest points in the gray image
	detector.detect(gray);

	// describe features in the gray or color image, depending on what the descriptor expects
	if (describe.getImageType().getFamily() == ImageType.Family.SINGLE_BAND)
		describe.setImage(gray);
	else
		describe.setImage(color);
	orientation.setImage(gray);

	if (detector.hasScale()) {
		for (int i = 0; i < detector.getNumberOfFeatures(); i++) {
			double yaw = 0;

			Point2D_F64 pt = detector.getLocation(i);
			double scale = detector.getScale(i);
			if (describe.requiresOrientation()) {
				orientation.setScale(scale);
				yaw = orientation.compute(pt.x, pt.y);
			}

			TupleDesc d = descs.grow();
			if (describe.process(pt.x, pt.y, yaw, scale, d)) {
				locs.add(pt.copy());
			} else {
				// description failed; discard the descriptor that was just grown
				descs.removeTail();
			}
		}
	} else {
		orientation.setScale(1);
		for (int i = 0; i < detector.getNumberOfFeatures(); i++) {
			double yaw = 0;

			Point2D_F64 pt = detector.getLocation(i);
			if (describe.requiresOrientation()) {
				yaw = orientation.compute(pt.x, pt.y);
			}

			TupleDesc d = descs.grow();
			if (describe.process(pt.x, pt.y, yaw, 1, d)) {
				locs.add(pt.copy());
			} else {
				descs.removeTail();
			}
		}
	}
}
public List<List<Point2D_F64>> getBlobRegions() {
	// apply the stored scale factor to every quad point once, then mark it as applied
	if (this.scale != 1.0) {
		for (List<Point2D_F64> l : blobQuads) {
			for (Point2D_F64 p : l) {
				p.x *= this.scale;
				p.y *= this.scale;
			}
		}
		this.scale = 1.0;
	}
	return blobQuads;

//	if (this.blobQuads.size() < 9) {
//		System.out.println("blobs can't find enough things that might be cards");
//	} else {
//		// See whether the top 9/12/15 ROIs by size are roughly the same size
//
//		// Calculate areas and sort
//		int[] areas = new int[blobQuads.size()];
//		for (int i = 0; i < blobQuads.size(); i++) {
//			areas[i] = Segmenter.areaOfRegion(blobQuads.get(i));
//		}
//		Arrays.sort(areas);
//
//		// From the top, see if they're all within tolerance of the mean
//		int similarSizedLargest = -1;
//		if (blobQuads.size() >= 15 && Segmenter.topNSimilarSized(areas, 15)) {
//			similarSizedLargest = 15;
//		} else if (blobQuads.size() >= 12 && Segmenter.topNSimilarSized(areas, 12)) {
//			similarSizedLargest = 12;
//		} else if (Segmenter.topNSimilarSized(areas, 9)) {
//			similarSizedLargest = 9;
//		}
//
//		if (similarSizedLargest != -1) {
//			List<List<Point2D_F64>> finalEdgeList =
//					blobQuads.subList(blobQuads.size() - similarSizedLargest, blobQuads.size() - 1);
//			return finalEdgeList;
//		}
//	}
//	return null;
}
@Override
public void track(I image) {
	// update the image pyramid and its gradient
	pyramid.process(image);
	if (derivX == null) {
		derivX = PyramidOps.declareOutput(pyramid, derivType);
		derivY = PyramidOps.declareOutput(pyramid, derivType);
	}
	PyramidOps.gradient(pyramid, gradient, derivX, derivY);

	tracker.setInputs(pyramid, derivX, derivY);

	if (first) {
		// spawn the initial set of tracks from detected interest points
		first = false;
		detector.detect(image);
		for (int i = 0; i < detector.getNumberOfFeatures(); i++) {
			Point2D_F64 p = detector.getLocation(i);

			PyramidKltFeature t = tracker.createNewTrack();
			t.cookie = p.copy();
			tracker.setDescription((float) p.x, (float) p.y, t);
			tracks.add(t);
		}
	} else {
		// update existing tracks and drop any that could not be followed
		for (int i = 0; i < tracks.size(); ) {
			PyramidKltFeature t = tracks.get(i);
			if (!tracker.performTracking(t)) {
				tracks.remove(i);
			} else {
				i++;
			}
		}
	}
}
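As a small illustrative follow-up, the sketch below (a hypothetical helper, not part of the original class) shows how the cookie stored in track() can be read back alongside each track's current position. It assumes PyramidKltFeature exposes its position through public x/y fields; if the version in use differs, substitute the appropriate accessor.

/**
 * Hypothetical helper: prints where each KLT track was spawned (the Point2D_F64
 * stored in the cookie above) and where the tracker currently places it.
 * Assumes PyramidKltFeature has public x/y position fields.
 */
public void printTrackMotion() {
	for (PyramidKltFeature t : tracks) {
		Point2D_F64 spawn = (Point2D_F64) t.cookie;
		System.out.printf("track: spawned at (%.1f, %.1f), now at (%.1f, %.1f)%n",
				spawn.x, spawn.y, t.x, t.y);
	}
}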
/**
 * Given a sequence of points on the contour, find the best-fit line.
 *
 * @param contourIndex0 contour index of first point in the sequence
 * @param contourIndex1 contour index of last point (exclusive) in the sequence
 * @param line storage for the found line
 * @return true if successful or false if it failed
 */
boolean fitLine( int contourIndex0, int contourIndex1, LineGeneral2D_F64 line ) {
	int numPixels = CircularIndex.distanceP(contourIndex0, contourIndex1, contour.size());

	// if it's too small, don't bother fitting a line
	if (numPixels < minimumLineLength)
		return false;

	Point2D_I32 c0 = contour.get(contourIndex0);
	Point2D_I32 c1 = contour.get(contourIndex1);

	// normalize the sample points for numerical stability: shift to the segment's
	// midpoint and divide by the distance between the end points
	double scale = c0.distance(c1);
	double centerX = (c1.x + c0.x) / 2.0;
	double centerY = (c1.y + c0.y) / 2.0;

	int numSamples = Math.min(20, numPixels);

	pointsFit.reset();
	for (int i = 0; i < numSamples; i++) {
		int index = i * (numPixels - 1) / (numSamples - 1);

		Point2D_I32 c = contour.get(CircularIndex.addOffset(contourIndex0, index, contour.size()));

		Point2D_F64 p = pointsFit.grow();
		p.x = (c.x - centerX) / scale;
		p.y = (c.y - centerY) / scale;
	}

	if (null == FitLine_F64.polar(pointsFit.toList(), linePolar)) {
		return false;
	}
	UtilLine2D_F64.convert(linePolar, line);

	// go from local (normalized) coordinates back into global coordinates
	line.C = scale * line.C - centerX * line.A - centerY * line.B;

	return true;
}
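The final adjustment of line.C follows from substituting the normalized coordinates back into the fitted line. With x' = (x - centerX)/scale and y' = (y - centerY)/scale, the fitted line A*x' + B*y' + C = 0 becomes A*(x - centerX)/scale + B*(y - centerY)/scale + C = 0; multiplying through by scale gives A*x + B*y + (scale*C - A*centerX - B*centerY) = 0, which is exactly the update applied above. A and B are unchanged because the rescaling is uniform in x and y.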
@Override public void compute(double x, double y, Point2D_F64 out) { out.x = x; out.y = height - y; }