@Override
public PointTracker<GrayF32> createTracker() {
  // BRIEF descriptor with a fixed-seed random Gaussian sampling pattern
  // (seed 123 keeps the descriptor layout reproducible across runs).
  DescribePointBrief<GrayF32> briefDescriptor =
      FactoryDescribePointAlgs.brief(
          FactoryBriefDefinition.gaussian2(new Random(123), 16, 512),
          FactoryBlurFilter.gaussian(GrayF32.class, 0, 4));

  // Shi-Tomasi corner detector, wrapped so it conforms to the
  // InterestPointDetector interface expected by the tracker factory.
  GeneralFeatureDetector<GrayF32, GrayF32> cornerDetector =
      FactoryDetectPoint.createShiTomasi(
          new ConfigGeneralDetector(100, 2, 0), false, GrayF32.class);
  InterestPointDetector<GrayF32> interestPoints =
      FactoryInterestPoint.wrapPoint(cornerDetector, 1, GrayF32.class, GrayF32.class);

  // Greedy association of binary descriptors scored by Hamming distance.
  AssociateDescription<TupleDesc_B> matcher =
      FactoryAssociation.greedy(new ScoreAssociateHamming_B(), 400, true);

  // Fuse detector, descriptor, and matcher into a combined tracker.
  // Orientation estimation is not provided (null).
  return FactoryPointTracker.combined(
      interestPoints,
      null,
      new WrapDescribeBrief<>(briefDescriptor, GrayF32.class),
      matcher,
      null,
      20,
      GrayF32.class);
}
/** * Any arbitrary implementation of InterestPointDetector, OrientationImage, DescribeRegionPoint * can be combined into DetectDescribePoint. The syntax is more complex, but the end result is * more flexible. This should only be done if there isn't a pre-made DetectDescribePoint. */ public static <T extends ImageSingleBand, D extends TupleDesc> DetectDescribePoint<T, D> createFromComponents(Class<T> imageType) { // create a corner detector Class derivType = GImageDerivativeOps.getDerivativeType(imageType); GeneralFeatureDetector corner = FactoryDetectPoint.createShiTomasi(new ConfigGeneralDetector(1000, 5, 1), false, derivType); InterestPointDetector detector = FactoryInterestPoint.wrapPoint(corner, 1, imageType, derivType); // describe points using BRIEF DescribeRegionPoint describe = FactoryDescribeRegionPoint.brief(new ConfigBrief(true), imageType); // Combine together. // NOTE: orientation will not be estimated return FactoryDetectDescribe.fuseTogether(detector, null, describe); }
// Constructor: registers the selectable detector/descriptor/association options
// and allocates the image buffers used by the visualization.
// Category indices passed to addAlgorithm: 0 = interest point detectors,
// 1 = region descriptors, 2 = association direction flag (presumably
// forward-only vs. backwards validation — confirm against addAlgorithm's caller).
public VisualizeAssociationMatchesApp(Class<T> imageType, Class<D> derivType) {
  super(3); // three algorithm-selection categories (see indices 0..2 below)
  this.imageType = imageType;

  GeneralFeatureDetector<T, D> alg;

  // --- Category 0: interest point detectors ---
  addAlgorithm(
      0,
      "Fast Hessian",
      FactoryInterestPoint.fastHessian(new ConfigFastHessian(1, 2, 200, 1, 9, 4, 4)));
  // SIFT detector is only offered for single-band float images
  if (imageType == ImageFloat32.class)
    addAlgorithm(
        0,
        "SIFT",
        FactoryInterestPoint.siftDetector(null, new ConfigSiftDetector(2, 5, 200, 5)));
  alg = FactoryDetectPoint.createShiTomasi(new ConfigGeneralDetector(500, 2, 1), false, derivType);
  addAlgorithm(0, "Shi-Tomasi", FactoryInterestPoint.wrapPoint(alg, 1, imageType, derivType));

  // --- Category 1: region descriptors ---
  addAlgorithm(1, "SURF-S", FactoryDescribeRegionPoint.surfStable(null, imageType));
  // color variant works on a 3-band multi-spectral image of the same band type
  addAlgorithm(
      1,
      "SURF-S Color",
      FactoryDescribeRegionPoint.surfColorStable(null, ImageType.ms(3, imageType)));
  if (imageType == ImageFloat32.class)
    addAlgorithm(1, "SIFT", FactoryDescribeRegionPoint.sift(null, null));
  addAlgorithm(1, "BRIEF", FactoryDescribeRegionPoint.brief(new ConfigBrief(true), imageType));
  addAlgorithm(1, "BRIEFSO", FactoryDescribeRegionPoint.brief(new ConfigBrief(false), imageType));
  addAlgorithm(1, "Pixel 11x11", FactoryDescribeRegionPoint.pixel(11, 11, imageType));
  addAlgorithm(1, "NCC 11x11", FactoryDescribeRegionPoint.pixelNCC(11, 11, imageType));

  // --- Category 2: association direction (boolean flag) ---
  addAlgorithm(2, "Greedy", false);
  addAlgorithm(2, "Backwards", true);

  // estimate orientation using this once since it is fast and accurate
  Class integralType = GIntegralImageOps.getIntegralType(imageType);
  OrientationIntegral orientationII = FactoryOrientationAlgs.sliding_ii(null, integralType);
  orientation = FactoryOrientation.convertImage(orientationII, imageType);

  // Placeholder-sized buffers; presumably resized when actual images are loaded.
  imageLeft = new MultiSpectral<T>(imageType, 1, 1, 3);
  imageRight = new MultiSpectral<T>(imageType, 1, 1, 3);
  grayLeft = GeneralizedImageOps.createSingleBand(imageType, 1, 1);
  grayRight = GeneralizedImageOps.createSingleBand(imageType, 1, 1);

  setMainGUI(panel);
}