  @Override
  public void beforeUpdate(MBFImage frame) {
    if (transform != null) {
      // warp the incoming frame with the current transform and draw it over a copy of mbfImage
      MBFImage bgCopy = mbfImage.clone();
      MBFProjectionProcessor proj = new MBFProjectionProcessor();
      proj.setMatrix(transform);
      proj.accumulate(frame);
      proj.performProjection(0, 0, bgCopy);
      panel.setImage(bimg = ImageUtilities.createBufferedImageForDisplay(bgCopy, bimg));
    } else {
      panel.setImage(bimg = ImageUtilities.createBufferedImageForDisplay(frame, bimg));
    }
  }
Example #2
  /**
   * Default main
   *
   * @param args Command-line arguments
   */
  public static void main(final String[] args) {
    // Open the audio stream
    final XuggleAudio a =
        new XuggleAudio(
            AudioWaveformPlotter.class.getResource(
                "/org/openimaj/demos/audio/140bpm_formware_psytech.mp3"));

    // This is how wide we're going to draw the display
    final int w = 1920;

    // This is how high we'll draw the display
    final int h = 200;

    final MBFImage img =
        org.openimaj.vis.audio.AudioOverviewVisualisation.getAudioWaveformImage(
            a, w, h, new Float[] {0f, 0f, 0f, 1f}, new Float[] {1f, 1f, 1f, 1f});

    // Write the image to a file.
    try {
      ImageUtilities.write(img, "png", new File("audioWaveform.png"));
    } catch (final IOException e) {
      e.printStackTrace();
    }

    // Display the image
    DisplayUtilities.display(img);
  }
Example #3
  @Override
  public synchronized void afterUpdate(final VideoDisplay<MBFImage> display) {
    if (this.matcher != null && !this.videoFrame.isPaused()) {
      final MBFImage capImg = this.videoFrame.getVideo().getCurrentFrame();
      final LocalFeatureList<Keypoint> kpl = this.engine.findFeatures(capImg);

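      // overlay the detected keypoints on the live frame in magenta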
      final MBFImageRenderer renderer = capImg.createRenderer();
      renderer.drawPoints(kpl, RGBColour.MAGENTA, 3);

      // attempt to match against the model; if a consistent match is found, render the overlay
      if (this.matcher.findMatches(kpl)) {
        try {
          final Matrix boundsToPoly =
              ((MatrixTransformProvider) this.matcher.getModel()).getTransform().inverse();
          this.renderMode.render(renderer, boundsToPoly, this.modelImage.getBounds());
        } catch (final RuntimeException e) {
          // the estimated transform may not be invertible; skip the overlay for this frame
        }
      }

      // draw the point matches (empty when matching failed) between the model image and the frame
      final MBFImage matches =
          MatchingUtilities.drawMatches(
              this.modelImage, capImg, this.matcher.getMatches(), RGBColour.RED);

      this.matchPanel.setPreferredSize(this.matchPanel.getSize());
      this.matchFrame.setImage(ImageUtilities.createBufferedImageForDisplay(matches));
    }
  }
  public static void extractImage(
      String rootDir,
      String scaledDir,
      long subjectId,
      long imageId,
      String fileName,
      int haarMinSize,
      int scaledSize)
      throws IOException {
    // read the image directly from the file (avoids leaving a FileInputStream unclosed)
    MBFImage image = ImageUtilities.readMBF(new File(fileName));
    BufferedImage detectedFacesImage = ImageIO.read(new File(fileName));
    // ImageUtils.displayImage(detectedFacesImage);
    FaceDetector<DetectedFace, FImage> fd = new HaarCascadeDetector(haarMinSize);
    List<DetectedFace> faces = fd.detectFaces(Transforms.calculateIntensity(image));
    System.out.println("# Found faces, one per line.");
    System.out.println("# <x>, <y>, <width>, <height>");
    Iterator<DetectedFace> iterator = faces.iterator();
    BufferedImage extractFaceImage = null;
    if (iterator.hasNext()) {
      DetectedFace face = iterator.next();
      Rectangle bounds = face.getBounds();
      //            extractFaceImage = detectedFacesImage.getSubimage((int)bounds.x, (int)bounds.y, (int)bounds.width, (int)bounds.height);
      //            writeImage(extractFaceImage, rootDir, "extracted", subjectId, imageId);

      //            BufferedImage scaledBufferedImage = scale(extractFaceImage, scaledSize);
      //            String scaledFileName = scaledDir + subjectId + "-" + imageId + ".jpg";
      //            System.out.println("scaledFileName = " + scaledFileName);
      //            ImageIO.write(scaledBufferedImage, "jpg", new File(scaledFileName));
      Graphics g = detectedFacesImage.createGraphics();
      g.setColor(Color.GREEN);
      g.drawRect((int) bounds.x, (int) bounds.y, (int) bounds.width, (int) bounds.height);
      System.out.println(bounds.x + ";" + bounds.y + ";" + bounds.width + ";" + bounds.height);
    } else {
      Graphics g = detectedFacesImage.createGraphics();
      g.setColor(Color.GREEN);
      g.drawString("No Image Detected", 20, 20);
    }
    writeImage(detectedFacesImage, rootDir, "detected", subjectId, imageId);
    //
    //        FaceDetector<KEDetectedFace, FImage> fdK = new FKEFaceDetector();
    //        List<KEDetectedFace> facesK = fdK.detectFaces(Transforms.calculateIntensity(image));
    //        if (!facesK.isEmpty()) {
    //        	System.out.println("Found face");
    //        	KEDetectedFace detectedKeyFace = facesK.get(0);
    //        	FacialKeypoint[] keypoints = detectedKeyFace.getKeypoints();
    //        	for (FacialKeypoint keypoint: keypoints) {
    //        		System.out.println("keypoint:" + keypoint);
    //        	}
    //        }

  }
Example #5
  /**
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    final FImage img =
        ResizeProcessor.halfSize(
            ImageUtilities.readF(
                new File("/Users/jsh2/Dropbox/Photos/Sample Album/Boston City Flow.jpg")));

    final long t1 = System.currentTimeMillis();
    final LeungMalikFilterBank fb = new LeungMalikFilterBank();
    fb.analyseImage(img);
    final long t2 = System.currentTimeMillis();

    System.out.println(t2 - t1);
  }
Example #6
  /**
   * Main method. The first argument is the input CSV file; the second is the output directory.
   *
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    final File inputCSV = new File(args[0]);
    final File outputDirBase = new File(args[1]);

    System.setOut(new PrintStream(System.out, true, "UTF-8"));

    final BufferedReader br =
        new BufferedReader(new InputStreamReader(new FileInputStream(inputCSV), "UTF-8"));

    String it;
    while ((it = br.readLine()) != null) {
      final String[] parts = it.split(csvregex);

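      // column 2 of the CSV holds the page URL; columns 0 and 1 form the output directory path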
      final String url = parts[2];
      System.out.println(url);

      final File dir =
          new File(
              outputDirBase,
              parts[0].replace("\"", "")
                  + "/"
                  + parts[1]
                  + "/"
                  + url.replace(":", "|").replace("/", "_"));
      final File layoutfile = new File(dir, "layout.csv");
      final File imagefile = new File(dir, "render.png");

      if (dir.exists()) continue;
      if (!dir.mkdirs()) continue;

      final LayoutExtractor le = new LayoutExtractor(30000L); // timeout after 30s
      if (le.load(url)) {
        final PrintWriter layoutfilePW = new PrintWriter(new FileWriter(layoutfile));

        final List<ElementInfo> info = le.getLayoutInfo();
        layoutfilePW.println(ElementInfo.getCSVHeader());
        for (final ElementInfo ei : info) {
          layoutfilePW.println(ei.toCSVString());
        }

        layoutfilePW.close();

        final MBFImage image = le.render(1024, 768);
        if (image != null) ImageUtilities.write(image, imagefile);
      }
    }

    br.close();
  }
Example #7
  @Override
  public synchronized void keyPressed(final KeyEvent key) {
    if (key.getKeyCode() == KeyEvent.VK_SPACE) {
      this.videoFrame.togglePause();
    } else if (key.getKeyChar() == 'c'
        && this.polygonListener.getPolygon().getVertices().size() > 2) {
      try {
        final Polygon p = this.polygonListener.getPolygon().clone();
        this.polygonListener.reset();
        this.modelImage =
            this.capture
                .getCurrentFrame()
                .process(new PolygonExtractionProcessor<Float[], MBFImage>(p, RGBColour.BLACK));

        if (this.matcher == null) {
          // configure the matcher
          final HomographyModel model = new HomographyModel();
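          // robustly fit the homography with RANSAC: 3.0 residual tolerance, up to 1500
          // iterations, stopping early via the probabilistic minimum-inliers condition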
          final RANSAC<Point2d, Point2d, HomographyModel> ransac =
              new RANSAC<Point2d, Point2d, HomographyModel>(
                  model,
                  new SingleImageTransferResidual2d<HomographyModel>(),
                  3.0,
                  1500,
                  new RANSAC.ProbabilisticMinInliersStoppingCondition(0.01),
                  true);
          this.matcher =
              new ConsistentLocalFeatureMatcher2d<Keypoint>(
                  new FastBasicKeypointMatcher<Keypoint>(8));
          this.matcher.setFittingModel(ransac);

          this.modelPanel.setPreferredSize(this.modelPanel.getSize());
        }

        this.modelFrame.setImage(ImageUtilities.createBufferedImageForDisplay(this.modelImage));

        final DoGColourSIFTEngine engine = new DoGColourSIFTEngine();
        engine.getOptions().setDoubleInitialImage(true);

        this.matcher.setModelFeatures(engine.findFeatures(this.modelImage));
      } catch (final Exception e) {
        e.printStackTrace();
      }
    } else if (key.getKeyChar() == '1') {
      this.renderMode = RenderMode.SQUARE;
    } else if (key.getKeyChar() == '2') {
      this.renderMode = RenderMode.PICTURE;
    } else if (key.getKeyChar() == '3') {
      this.renderMode = RenderMode.VIDEO;
    }
  }
  /**
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    FImage img1, img2;

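    // set up the KLT tracker: the TrackingContext holds the tracker parameters and the
    // FeatureList stores up to nFeatures tracked features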
    int nFeatures = 100;
    TrackingContext tc = new TrackingContext();
    FeatureList fl = new FeatureList(nFeatures);
    KLTTracker tracker = new KLTTracker(tc, fl);

    System.out.println(tc);

    img1 = ImageUtilities.readF(Example1.class.getResourceAsStream("img0.pgm"));
    img2 = ImageUtilities.readF(Example1.class.getResourceAsStream("img1.pgm"));

    tracker.selectGoodFeatures(img1);

    System.out.println("\nIn first image:\n");
    for (int i = 0; i < fl.features.length; i++) {
      System.out.format(
          "Feature #%d:  (%f,%f) with value of %d\n",
          i, fl.features[i].x, fl.features[i].y, fl.features[i].val);
    }

    DisplayUtilities.display(fl.drawFeatures(img1));
    fl.writeFeatureList(null, "%3d");

    tracker.trackFeatures(img1, img2);

    System.out.println("\nIn second image:\n");
    for (int i = 0; i < fl.features.length; i++) {
      System.out.format(
          "Feature #%d:  (%f,%f) with value of %d\n",
          i, fl.features[i].x, fl.features[i].y, fl.features[i].val);
    }

    DisplayUtilities.display(fl.drawFeatures(img2));
    fl.writeFeatureList(null, "%5.1f");
  }
  /**
   * an example run
   *
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
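    // scales for the Hessian interest point detector: sd is the detection (derivative) scale,
    // si the integration scale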
    float sd = 5;
    float si = 1.4f * sd;
    HessianIPD ipd = new HessianIPD(sd, si);
    FImage img =
        ImageUtilities.readF(
            AffineAdaption.class.getResourceAsStream("/org/openimaj/image/data/sinaface.jpg"));

    //		img = img.multiply(255f);

    //		ipd.findInterestPoints(img);
    //		List<InterestPointData> a = ipd.getInterestPoints(1F/(256*256));
    //
    //		System.out.println("Found " + a.size() + " features");
    //
    //		AffineAdaption adapt = new AffineAdaption();
    //		EllipticKeyPoint kpt = new EllipticKeyPoint();
    MBFImage outImg = new MBFImage(img.clone(), img.clone(), img.clone());
    //		for (InterestPointData d : a) {
    //
    ////			InterestPointData d = new InterestPointData();
    ////			d.x = 102;
    ////			d.y = 396;
    //			logger.info("Keypoint at: " + d.x + ", " + d.y);
    //			kpt.si = si;
    //			kpt.centre = new Pixel(d.x, d.y);
    //			kpt.size = 2 * 3 * kpt.si;
    //
    //			boolean converge = adapt.calcAffineAdaptation(img, kpt);
    //			if(converge)
    //			{
    //				outImg.drawShape(new Ellipse(kpt.centre.x, kpt.centre.y, kpt.axes.getX(), kpt.axes.getY(), kpt.phi), RGBColour.BLUE);
    //				outImg.drawPoint(kpt.centre, RGBColour.RED,3);
    //			}
    //
    //
    //
    //			logger.info("... converged: "+ converge);
    //		}
    AffineAdaption adapt = new AffineAdaption(ipd, new IPDSelectionMode.Count(100));
    adapt.findInterestPoints(img);
    InterestPointVisualiser<Float[], MBFImage> ipv =
        InterestPointVisualiser.visualiseInterestPoints(outImg, adapt.points);
    DisplayUtilities.display(ipv.drawPatches(RGBColour.BLUE, RGBColour.RED));
  }
Example #10
      @Override
      public void render(
          final MBFImageRenderer renderer, final Matrix transform, final Rectangle rectangle) {
        if (this.toRender == null) {
          try {
            this.toRender =
                ImageUtilities.readMBF(
                    VideoColourSIFT.class.getResource("/org/openimaj/demos/OpenIMAJ.png"));
          } catch (final IOException e) {
            System.err.println("Can't load image to render");
            return; // without the overlay image there is nothing to render
          }
          this.renderToBounds =
              TransformUtilities.makeTransform(this.toRender.getBounds(), rectangle);
        }

        final MBFProjectionProcessor mbfPP = new MBFProjectionProcessor();
        mbfPP.setMatrix(transform.times(this.renderToBounds));
        mbfPP.accumulate(this.toRender);
        mbfPP.performProjection(0, 0, renderer.getImage());
      }
Example #11
  /**
   * Testing
   *
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    FImage image = ImageUtilities.readF(new File("/Users/jsh2/Desktop/image.png"));
    FImage template = image.extractROI(100, 100, 100, 100);
    image.fill(0f);
    image.drawImage(template, 100, 100);

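    // correlate the template over a restricted search window and show the normalised response map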
    TemplateMatcher matcher = new TemplateMatcher(template, Mode.CORRELATION);
    matcher.setSearchBounds(new Rectangle(100, 100, 200, 200));
    image.analyseWith(matcher);
    DisplayUtilities.display(matcher.responseMap.normalise());

    MBFImage cimg = image.toRGB();
    for (FValuePixel p : matcher.getBestResponses(10)) {
      System.out.println(p);
      cimg.drawPoint(p, RGBColour.RED, 1);
    }

    cimg.drawShape(matcher.getSearchBounds(), RGBColour.BLUE);
    cimg.drawShape(new Rectangle(100, 100, 100, 100), RGBColour.GREEN);

    DisplayUtilities.display(cimg);
  }
  /**
   * Setup tests
   *
   * @throws IOException
   */
  @Before
  public void setup() throws IOException {
    image = ImageUtilities.readF(OpenIMAJ.getLogoAsStream());
    // 64-bin windowed histogram analyser applied to the logo image
    analyser = new BinnedWindowedExtractor(64);
    image.analyseWith(analyser);
  }
  /**
   * Setup the test data
   *
   * @throws IOException
   */
  @Before
  public void setup() throws IOException {
    image =
        ImageUtilities.readF(getClass().getResourceAsStream("/org/openimaj/image/data/bird.png"));
    template = image.extractROI(100, 100, 100, 100);
  }