/**
 * Builds a combined point tracker for {@link GrayF32} images: Shi-Tomasi corners
 * detected as interest points, described with 512-bit BRIEF descriptors, and
 * associated greedily by Hamming distance.
 *
 * @return a newly constructed {@code PointTracker<GrayF32>}; never reused between calls
 */
@Override
public PointTracker<GrayF32> createTracker() {
    // Shi-Tomasi corner detector: at most 100 features, non-max radius 2, threshold 0.
    GeneralFeatureDetector<GrayF32, GrayF32> shiTomasi = FactoryDetectPoint.createShiTomasi(
            new ConfigGeneralDetector(100, 2, 0), false, GrayF32.class);
    InterestPointDetector<GrayF32> interestPoints =
            FactoryInterestPoint.wrapPoint(shiTomasi, 1, GrayF32.class, GrayF32.class);

    // BRIEF descriptor: 512 comparisons over a 16-pixel region; fixed seed (123)
    // keeps the sampling pattern reproducible across runs.
    DescribePointBrief<GrayF32> briefDescriber = FactoryDescribePointAlgs.brief(
            FactoryBriefDefinition.gaussian2(new Random(123), 16, 512),
            FactoryBlurFilter.gaussian(GrayF32.class, 0, 4));

    // Greedy association on Hamming distance, max score 400, cross-check enabled.
    ScoreAssociateHamming_B hammingScore = new ScoreAssociateHamming_B();
    AssociateDescription<TupleDesc_B> matcher = FactoryAssociation.greedy(hammingScore, 400, true);

    return FactoryPointTracker.combined(
            interestPoints, null,
            new WrapDescribeBrief<>(briefDescriber, GrayF32.class),
            matcher, null, 20, GrayF32.class);
}
/**
 * Extracts "color SURF" descriptors from {@code image}: Fast-Hessian interest points and
 * their orientations are computed once on the greyscale version of the image, then each
 * point is described independently on each of the three color bands using that shared
 * orientation, and the three band descriptors are concatenated per point.
 *
 * <p>Relies on fields declared elsewhere in this class: {@code detectThreshold},
 * {@code maxFeaturesPerScale}, {@code perBandNormalization}, and {@code SURFLength}.
 *
 * @param image input color (or greyscale) image
 * @return one row per detected point; each row is the 3 concatenated per-band descriptors
 */
public <II extends ImageSingleBand> double[][] harder(BufferedImage image) {
    MultiSpectral<ImageFloat32> colorImage = ConvertBufferedImage.convertFromMulti(image, null, true, ImageFloat32.class);
    // convert the color image to greyscale
    ImageFloat32 greyscaleImage = ConvertImage.average((MultiSpectral<ImageFloat32>) colorImage, null);
    // SURF works off of integral images
    Class<II> integralType = GIntegralImageOps.getIntegralType(ImageFloat32.class);
    // define the feature detection algorithm
    NonMaxSuppression extractor = FactoryFeatureExtractor.nonmax(new ConfigExtract(2, detectThreshold, 5, true));
    FastHessianFeatureDetector<II> detector = new FastHessianFeatureDetector<II>(extractor, maxFeaturesPerScale, 2, 9, 4, 4);
    // estimate orientation
    OrientationIntegral<II> orientation = FactoryOrientationAlgs.sliding_ii(null, integralType);
    DescribePointSurf<II> descriptor = FactoryDescribePointAlgs.<II>surfStability(null, integralType);
    // compute the integral image of the greyscale 'image'
    II integralgrey = GeneralizedImageOps.createSingleBand(integralType, greyscaleImage.width, greyscaleImage.height);
    GIntegralImageOps.transform(greyscaleImage, integralgrey);
    // detect fast hessian features
    detector.detect(integralgrey);
    // === This is the point where the code starts deviating from the standard SURF! ===
    // tell algorithms which image to process
    orientation.setImage(integralgrey);
    List<ScalePoint> points = detector.getFoundPoints();
    // NOTE(review): rows sized with descriptor.getDescriptionLength() but written below
    // with stride SURFLength — assumes SURFLength == descriptor.getDescriptionLength();
    // verify, otherwise rows are partially filled or writes go out of bounds.
    double[][] descriptions = new double[points.size()][3 * descriptor.getDescriptionLength()];
    double[] angles = new double[points.size()];
    int l = 0;
    // Compute each point's orientation once, on the greyscale integral image, so that
    // the same angle is reused for all three color bands below.
    for (ScalePoint p : points) {
        orientation.setScale(p.scale);
        angles[l] = orientation.compute(p.x, p.y);
        l++;
    }
    for (int i = 0; i < 3; i++) {
        // check if it is actually a greyscale image, take always the 1st band!
        // (a single-band input is therefore described 3 times with identical data)
        ImageFloat32 colorImageBand = null;
        if (colorImage.getNumBands() == 1) {
            colorImageBand = colorImage.getBand(0);
        } else {
            colorImageBand = colorImage.getBand(i);
        }
        // compute the integral image of the i-th band of the color 'image'
        II integralband = GeneralizedImageOps.createSingleBand(integralType, colorImageBand.width, colorImageBand.height);
        GIntegralImageOps.transform(colorImageBand, integralband);
        // tell algorithms which image to process
        // orientation.setImage(integralband);
        descriptor.setImage(integralband);
        int j = 0;
        for (ScalePoint p : points) {
            // estimate orientation
            // orientation.setScale(p.scale);
            // double angle = orientation.compute(p.x, p.y);
            // (per-band orientation deliberately disabled: the greyscale angle from
            // angles[j] is used instead so all bands share one orientation)
            // extract the SURF description for this region
            SurfFeature desc = descriptor.createDescription();
            descriptor.describe(p.x, p.y, angles[j], p.scale, (TupleDesc_F64) desc);
            double[] banddesc = desc.getValue();
            if (perBandNormalization) {
                // L2-normalize each band's descriptor independently before concatenation
                banddesc = Normalization.normalizeL2(banddesc);
            }
            // append this band's descriptor into the point's row at offset i * SURFLength
            for (int k = 0; k < SURFLength; k++) {
                descriptions[j][i * SURFLength + k] = banddesc[k];
            }
            j++;
        }
    }
    return descriptions;
}