/**
 * Launches the circulant tracker visualization on a sample video.
 *
 * @param args command-line arguments (unused)
 */
public static void main(String[] args) {
  // Parameterize the app with the image type instead of using a raw type on the left-hand side
  VisualizeCirculantTrackerApp<ImageUInt8> app =
      new VisualizeCirculantTrackerApp<ImageUInt8>(ImageUInt8.class);

  //		String fileName = "../data/applet/trackzing/track_peter.mjpeg";
  String fileName = "../data/applet/tracking/day_follow_car.mjpeg";

  SimpleImageSequence<ImageUInt8> sequence =
      DefaultMediaManager.INSTANCE.openVideo(fileName, ImageType.single(ImageUInt8.class));

  app.process(sequence);
}
  /**
   * Runs the TLD tracker over the named dataset and reports results via the debug generator.
   *
   * @param dataset identifier of the dataset to evaluate
   */
  public static void evaluate(String dataset) {
    Class imageClass = ImageFloat32.class;

    // Bilinear interpolation with extended borders, Sobel for image gradients
    InterpolatePixelS interp = FactoryInterpolation.bilinearPixelS(imageClass, BorderType.EXTENDED);
    ImageGradient deriv = FactoryDerivative.sobel(imageClass, imageClass);

    TldTracker tracker = new TldTracker(null, interp, deriv, imageClass, imageClass);

    DebugTldTrackerTldData evaluator = new DebugTldTrackerTldData(ImageType.single(imageClass));
    evaluator.evaluate(dataset, tracker);
  }
  /**
   * Builds the monocular plane-infinity visual odometry algorithm backed by a pyramidal KLT
   * point tracker on unsigned 8-bit images.
   */
  protected MonocularPlaneVisualOdometry<ImageUInt8> createAlgorithm() {

    // Corner detector: at most 600 features, radius 3, threshold 1
    ConfigGeneralDetector detectorConfig = new ConfigGeneralDetector(600, 3, 1);

    // Four-level pyramid with a 7x7 (radius 3) template
    PkltConfig kltConfig = new PkltConfig();
    kltConfig.pyramidScaling = new int[] {1, 2, 4, 8};
    kltConfig.templateRadius = 3;

    PointTracker<ImageUInt8> pointTracker =
        FactoryPointTracker.klt(kltConfig, detectorConfig, ImageUInt8.class, ImageSInt16.class);

    return FactoryVisualOdometry.monoPlaneInfinity(
        50, 2, 1.5, 300, pointTracker, ImageType.single(ImageUInt8.class));
  }
  /**
   * Wires up video stabilization: a KLT tracker feeds an affine 2D motion estimator, whose output
   * drives the frame stitcher.
   */
  private StitchingFromMotion2D<GrayU8, Affine2D_F64> createStabilization() {

    ConfigGeneralDetector detectorConfig = new ConfigGeneralDetector();
    detectorConfig.maxFeatures = 150;
    detectorConfig.threshold = 40;
    detectorConfig.radius = 3;

    // Three-level pyramid for the KLT tracker
    int[] pyramidScales = {1, 2, 4};
    PointTracker<GrayU8> pointTracker =
        FactoryPointTracker.klt(pyramidScales, detectorConfig, 3, GrayU8.class, GrayS16.class);

    ImageMotion2D<GrayU8, Affine2D_F64> motionModel =
        FactoryMotion2D.createMotion2D(
            100, 1.5, 2, 40, 0.5, 0.6, false, pointTracker, new Affine2D_F64());

    return FactoryMotion2D.createVideoStitch(0.2, motionModel, ImageType.single(GrayU8.class));
  }
 /**
  * Configures the transforms that undo lens distortion for the given camera.
  *
  * @param param intrinsic camera parameters
  * @param rect rectification matrix, or {@code null} to only remove radial distortion
  */
 public void setDistorted(IntrinsicParameters param, DenseMatrix64F rect) {
   if (rect != null) {
     // Rectified case: distortion removal is folded into the rectification transform
     this.undoRadial =
         RectifyImageOps.rectifyImage(param, rect, BorderType.VALUE, ImageFloat32.class);
     this.remove_p_to_p = RectifyImageOps.transformPixelToRect_F32(param, rect);
   } else {
     // No rectification: just undo lens distortion while keeping the full view
     this.undoRadial =
         LensDistortionOps.imageRemoveDistortion(
             AdjustmentType.FULL_VIEW,
             BorderType.VALUE,
             param,
             null,
             ImageType.single(ImageFloat32.class));
     this.remove_p_to_p =
         LensDistortionOps.transform_F32(AdjustmentType.FULL_VIEW, param, null, false);
   }
 }
// Example #6
 /** Constructs the gradient display, configuring the base class for single-band U8 images. */
 protected ShowGradient() {
   super(ImageType.single(GrayU8.class));
 }
 /** The output image type: a single-band unsigned 8-bit image. */
 @Override
 public ImageType<ImageUInt8> getOutputType() {
   ImageType<ImageUInt8> outputType = ImageType.single(ImageUInt8.class);
   return outputType;
 }
// Example #8
 /** The image type processed here: a single-band 32-bit float image. */
 @Override
 public ImageType<ImageFloat32> getImageType() {
   ImageType<ImageFloat32> imageType = ImageType.single(ImageFloat32.class);
   return imageType;
 }
 /** Configures the distortion test harness for single-band U8 images. */
 public TestImplImageDistort_I8() {
   super(ImageType.single(GrayU8.class));
 }
 /**
  * Configures the base class for single-band U8 images and stores the stitching algorithm.
  *
  * @param alg stitching algorithm this processor drives
  */
 public PointProcessing(StitchingFromMotion2D<GrayU8, Affine2D_F64> alg) {
   super(ImageType.single(GrayU8.class));
   this.alg = alg;
 }
// Example #11
 /** Returns a single-band image type built from the configured {@code imageType} class. */
 @Override
 public ImageType<T> getImageType() {
   return ImageType.single(imageType);
 }
  /**
   * Demonstrates motion-compensated background modeling with a moving camera. A KLT tracker
   * estimates the 2D homography between frames, which maps each frame into a fixed "world" frame
   * where a background model segments foreground (moving) pixels.
   *
   * @param args command-line arguments (unused)
   */
  public static void main(String[] args) {

    // Example with a moving camera.  Highlights why motion estimation is sometimes required
    String fileName = UtilIO.pathExample("tracking/chipmunk.mjpeg");
    // Camera has a bit of jitter in it.  Static kinda works but motion reduces false positives
    //		String fileName = UtilIO.pathExample("background/horse_jitter.mp4");

    // Comment/Uncomment to switch input image type
    ImageType imageType = ImageType.single(GrayF32.class);
    //		ImageType imageType = ImageType.il(3, InterleavedF32.class);
    //		ImageType imageType = ImageType.il(3, InterleavedU8.class);

    // Configure the feature detector
    ConfigGeneralDetector confDetector = new ConfigGeneralDetector();
    confDetector.threshold = 10;
    confDetector.maxFeatures = 300;
    confDetector.radius = 6;

    // Use a KLT tracker
    PointTracker tracker =
        FactoryPointTracker.klt(new int[] {1, 2, 4, 8}, confDetector, 3, GrayF32.class, null);

    // This estimates the 2D image motion
    ImageMotion2D<GrayF32, Homography2D_F64> motion2D =
        FactoryMotion2D.createMotion2D(
            500, 0.5, 3, 100, 0.6, 0.5, false, tracker, new Homography2D_F64());

    ConfigBackgroundBasic configBasic = new ConfigBackgroundBasic(30, 0.005f);

    // Configuration for Gaussian model.  Note that the threshold changes depending on the number of
    // image bands
    // 12 = gray scale and 40 = color
    ConfigBackgroundGaussian configGaussian = new ConfigBackgroundGaussian(12, 0.001f);
    configGaussian.initialVariance = 64;
    configGaussian.minimumDifference = 5;

    // Comment/Uncomment to switch background mode
    BackgroundModelMoving background =
        FactoryBackgroundModel.movingBasic(
            configBasic, new PointTransformHomography_F32(), imageType);
    //				FactoryBackgroundModel.movingGaussian(configGaussian, new PointTransformHomography_F32(),
    // imageType);

    MediaManager media = DefaultMediaManager.INSTANCE;
    SimpleImageSequence video = media.openVideo(fileName, background.getImageType());
    //				media.openCamera(null,640,480,background.getImageType());

    // ====== Initialize Images

    // storage for segmented image.  Background = 0, Foreground = 1
    GrayU8 segmented = new GrayU8(video.getNextWidth(), video.getNextHeight());
    // Grey scale image that's the input for motion estimation
    GrayF32 grey = new GrayF32(segmented.width, segmented.height);

    // coordinate frames
    Homography2D_F32 firstToCurrent32 = new Homography2D_F32();
    Homography2D_F32 homeToWorld = new Homography2D_F32();
    homeToWorld.a13 = grey.width / 2;
    homeToWorld.a23 = grey.height / 2;

    // Create a background image twice the size of the input image.  Tell it that the home is in the
    // center
    background.initialize(grey.width * 2, grey.height * 2, homeToWorld);

    BufferedImage visualized =
        new BufferedImage(segmented.width, segmented.height, BufferedImage.TYPE_INT_RGB);
    ImageGridPanel gui = new ImageGridPanel(1, 2);
    gui.setImages(visualized, visualized);

    ShowImages.showWindow(gui, "Detections", true);

    double fps = 0;
    double alpha = 0.01; // smoothing factor for FPS

    while (video.hasNext()) {
      ImageBase input = video.next();

      long before = System.nanoTime();
      GConvertImage.convert(input, grey);

      if (!motion2D.process(grey)) {
        throw new RuntimeException("Should handle this scenario");
      }

      Homography2D_F64 firstToCurrent64 = motion2D.getFirstToCurrent();
      UtilHomography.convert(firstToCurrent64, firstToCurrent32);

      // Segment against the background in the world frame, then update the model
      background.segment(firstToCurrent32, input, segmented);
      background.updateBackground(firstToCurrent32, input);
      long after = System.nanoTime();

      // Exponentially-smoothed frames-per-second estimate
      fps = (1.0 - alpha) * fps + alpha * (1.0 / ((after - before) / 1e9));

      VisualizeBinaryData.renderBinary(segmented, false, visualized);
      gui.setImage(0, 0, (BufferedImage) video.getGuiImage());
      gui.setImage(0, 1, visualized);
      gui.repaint();

      System.out.println("FPS = " + fps);

      try {
        Thread.sleep(5);
      } catch (InterruptedException e) {
        // Don't swallow the interrupt: restore the flag and stop the demo loop
        Thread.currentThread().interrupt();
        break;
      }
    }
  }
// Example #13
 /**
  * Derivative image type, reflectively obtained from the third parameter of method {@code m}.
  * NOTE(review): assumes {@code m.getParameterTypes()[2]} is the derivative image class — {@code m}
  * is declared outside this view, so confirm against its definition.
  */
 @Override
 public ImageType<Output> getDerivativeType() {
   return ImageType.single((Class) m.getParameterTypes()[2]);
 }