Code example #1: rendering an audio waveform overview to an image
  /**
   * Default main
   *
   * @param args Command-line arguments
   */
  public static void main(final String[] args) {
    // Open the audio stream
    final XuggleAudio a =
        new XuggleAudio(
            AudioWaveformPlotter.class.getResource(
                "/org/openimaj/demos/audio/140bpm_formware_psytech.mp3"));

    // This is how wide we're going to draw the display
    final int w = 1920;

    // This is how high we'll draw the display
    final int h = 200;

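    // Render the waveform overview at the requested size using the two RGBA colour arrays (here black and white)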
    final MBFImage img =
        org.openimaj.vis.audio.AudioOverviewVisualisation.getAudioWaveformImage(
            a, w, h, new Float[] {0f, 0f, 0f, 1f}, new Float[] {1f, 1f, 1f, 1f});

    // Write the image to a file.
    try {
      ImageUtilities.write(img, "png", new File("audioWaveform.png"));
    } catch (final IOException e) {
      e.printStackTrace();
    }

    // Display the image
    DisplayUtilities.display(img);
  }
Code example #2: template matching with TemplateMatcher
File: TemplateMatcher.java  Project: MAQ11/openimaj
  /**
   * Testing
   *
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
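    // Read a test image, cut out a 100x100 template, then blank the image and redraw the template at a known position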
    FImage image = ImageUtilities.readF(new File("/Users/jsh2/Desktop/image.png"));
    FImage template = image.extractROI(100, 100, 100, 100);
    image.fill(0f);
    image.drawImage(template, 100, 100);

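    // Match the template within a restricted search window using the CORRELATION mode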
    TemplateMatcher matcher = new TemplateMatcher(template, Mode.CORRELATION);
    matcher.setSearchBounds(new Rectangle(100, 100, 200, 200));
    image.analyseWith(matcher);
    DisplayUtilities.display(matcher.responseMap.normalise());

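    // Mark the ten strongest responses on a colour copy of the image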
    MBFImage cimg = image.toRGB();
    for (FValuePixel p : matcher.getBestResponses(10)) {
      System.out.println(p);
      cimg.drawPoint(p, RGBColour.RED, 1);
    }

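    // Outline the search bounds (blue) and the region the template was taken from (green)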
    cimg.drawShape(matcher.getSearchBounds(), RGBColour.BLUE);
    cimg.drawShape(new Rectangle(100, 100, 100, 100), RGBColour.GREEN);

    DisplayUtilities.display(cimg);
  }
Code example #3: KLT feature tracking between two frames
  /**
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    FImage img1, img2;

    int nFeatures = 100;
    TrackingContext tc = new TrackingContext();
    FeatureList fl = new FeatureList(nFeatures);
    KLTTracker tracker = new KLTTracker(tc, fl);

    System.out.println(tc);

    img1 = ImageUtilities.readF(Example1.class.getResourceAsStream("img0.pgm"));
    img2 = ImageUtilities.readF(Example1.class.getResourceAsStream("img1.pgm"));

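    // Select the strongest trackable features in the first frame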
    tracker.selectGoodFeatures(img1);

    System.out.println("\nIn first image:\n");
    for (int i = 0; i < fl.features.length; i++) {
      System.out.format(
          "Feature #%d:  (%f,%f) with value of %d\n",
          i, fl.features[i].x, fl.features[i].y, fl.features[i].val);
    }

    DisplayUtilities.display(fl.drawFeatures(img1));
    fl.writeFeatureList(null, "%3d");

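    // Track the selected features from the first frame into the second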
    tracker.trackFeatures(img1, img2);

    System.out.println("\nIn second image:\n");
    for (int i = 0; i < fl.features.length; i++) {
      System.out.format(
          "Feature #%d:  (%f,%f) with value of %d\n",
          i, fl.features[i].x, fl.features[i].y, fl.features[i].val);
    }

    DisplayUtilities.display(fl.drawFeatures(img2));
    fl.writeFeatureList(null, "%5.1f");
  }
Code example #4: Hessian interest point detection with affine adaptation
  /**
   * an example run
   *
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    float sd = 5;
    float si = 1.4f * sd;
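    // Hessian-based interest point detector with detection scale sd and integration scale si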
    HessianIPD ipd = new HessianIPD(sd, si);
    FImage img =
        ImageUtilities.readF(
            AffineAdaption.class.getResourceAsStream("/org/openimaj/image/data/sinaface.jpg"));

    //		img = img.multiply(255f);

    //		ipd.findInterestPoints(img);
    //		List<InterestPointData> a = ipd.getInterestPoints(1F/(256*256));
    //
    //		System.out.println("Found " + a.size() + " features");
    //
    //		AffineAdaption adapt = new AffineAdaption();
    //		EllipticKeyPoint kpt = new EllipticKeyPoint();
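    // Colour copy of the input image to draw the detections on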
    MBFImage outImg = new MBFImage(img.clone(), img.clone(), img.clone());
    //		for (InterestPointData d : a) {
    //
    ////			InterestPointData d = new InterestPointData();
    ////			d.x = 102;
    ////			d.y = 396;
    //			logger.info("Keypoint at: " + d.x + ", " + d.y);
    //			kpt.si = si;
    //			kpt.centre = new Pixel(d.x, d.y);
    //			kpt.size = 2 * 3 * kpt.si;
    //
    //			boolean converge = adapt.calcAffineAdaptation(img, kpt);
    //			if(converge)
    //			{
    //			outImg.drawShape(new Ellipse(kpt.centre.x, kpt.centre.y, kpt.axes.getX(), kpt.axes.getY(), kpt.phi), RGBColour.BLUE);
    //				outImg.drawPoint(kpt.centre, RGBColour.RED,3);
    //			}
    //
    //
    //
    //			logger.info("... converged: "+ converge);
    //		}
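    // Detect interest points with affine adaptation and visualise the adapted elliptical regions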
    AffineAdaption adapt = new AffineAdaption(ipd, new IPDSelectionMode.Count(100));
    adapt.findInterestPoints(img);
    InterestPointVisualiser<Float[], MBFImage> ipv =
        InterestPointVisualiser.visualiseInterestPoints(outImg, adapt.points);
    DisplayUtilities.display(ipv.drawPatches(RGBColour.BLUE, RGBColour.RED));
  }