  /*
   * Selects the integration scale that maximizes the LoG response at point c
   */
  float selIntegrationScale(final FImage image, float si, Pixel c) {
    FImage L;
    int cx = c.x;
    int cy = c.y;
    float maxLap = -Float.MAX_VALUE;
    float maxsx = si;
    float sigma, sigma_prev = 0;

    L = image.clone();
    /*
     * Search for the best integration scale between the previous and the next scale layer
     */
    for (float u = 0.7f; u <= 1.41; u += 0.1) {
      float sik = u * si;
      sigma = (float) Math.sqrt(Math.pow(sik, 2) - Math.pow(sigma_prev, 2));

      L.processInplace(new FGaussianConvolve(sigma, 3));

      sigma_prev = sik;
      //			Lap = L.process(LAPLACIAN_KERNEL_CONV);

      float lapVal = sik * sik * Math.abs(LAPLACIAN_KERNEL_CONV.responseAt(cx, cy, L));
      //			float lapVal = sik * sik * Math.abs(Lap.pixels[cy][cx]);

      if (lapVal >= maxLap) {
        maxLap = lapVal;
        maxsx = sik;
      }
    }
    return maxsx;
  }
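  /*
   * Note on the loop above: it maximizes the scale-normalized Laplacian
   * |sigma^2 * Lap(L)(c)| over candidate integration scales sigma = u * si,
   * u in {0.7, 0.8, ..., 1.4}. Because Gaussian variances add under repeated
   * blurring, each step only convolves with sqrt(sik^2 - sigma_prev^2) to take L
   * from the previous candidate scale to the current one. LAPLACIAN_KERNEL_CONV
   * is assumed (it is not shown in this listing) to be an FConvolution wrapping
   * the standard 3x3 Laplacian kernel {{0, 1, 0}, {1, -4, 1}, {0, 1, 0}}.
   */

  /*
   * Fast variant of the differentiation-scale selection: instead of searching
   * over s (see selDifferentiationScale later in this listing), it fixes
   * sd = 0.75 * si and computes the structure tensor at that single scale.
   */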
  AbstractStructureTensorIPD selDifferentiationScaleFast(
      FImage img, AbstractStructureTensorIPD ipd, float si, Pixel c) {
    AbstractStructureTensorIPD best = ipd.clone();
    float s = 0.75f;
    float sigma;
    FImage L;
    L = img.clone();
    float sd = s * si;

    // Smooth the image to the differentiation scale sd
    sigma = sd;

    L.processInplace(new FGaussianConvolve(sigma, 3));

    // X and Y derivatives
    best.setDetectionScale(sd);
    best.setIntegrationScale(si);
    best.setImageBlurred(true);

    best.findInterestPoints(L);

    //		M = calcSecondMomentMatrix(best, c.x, c.y);

    //		EigenValueVectorPair meig = MatrixUtils.symmetricEig2x2(M);
    //		Matrix eval = meig.getD();
    //		double eval1 = Math.abs(eval.get(0, 0));
    //		double eval2 = Math.abs(eval.get(1, 1));

    return best;
  }
  @Override
  public void beforeUpdate(MBFImage frame) {
    FImage gimg = frame.flatten();

    // bring to sigma = 1.0
    gimg.processInplace(new FGaussianConvolve(1f));

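    // Three-octave pyramid (summary of the loop below): at octave i the
    // downsampled image is processed at detection scale sqrt(2^i), and detected
    // coordinates and sample sizes are mapped back to the original frame by the
    // factor sf = 2^i. Since Gaussian variances add, convolving with
    // sqrt(2^(i+1) - 2^i) takes the image from sigma^2 = 2^i to sigma^2 = 2^(i+1)
    // before it is half-sized for the next octave.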
    for (int i = 0; i < 3; i++) {
      final int t = (int) Math.pow(2, i);
      final double sigma = Math.sqrt(t);
      final float sf = t;

      harris.setDetectionScale((float) sigma);
      harris.setImageBlurred(true);
      harris.findInterestPoints(gimg);

      final float iscale = harris.getIntegrationScale();
      final float samplesize = 4 * iscale + 1;

      for (final InterestPointData ipd : harris.getInterestPoints((float) (1e-5))) {
        ipd.x *= sf;
        ipd.y *= sf;
        frame.drawShape(new Circle(ipd, sf * samplesize), RGBColour.RED);
      }

      gimg.processInplace(new FGaussianConvolve((float) Math.sqrt((Math.pow(2, i + 1) - t))));
      gimg = ResizeProcessor.halfSize(gimg);
    }
  }
 @Override
 public void run() {
   for (MBFImage image : this.stream) {
     FImage gimage = Transforms.calculateIntensityNTSC(image);
     //			gimage.processInplace(this.thresholder);
     gimage.threshold(0.7f);
     displayFrame(gimage);
     //			trackInputs(image);
     //			handleInputs(image);
   }
 }
  private void displayCurrentPatch(
      FImage unwarped,
      float unwarpedx,
      float unwarpedy,
      FImage warped,
      int warpedx,
      int warpedy,
      Matrix transform,
      float scale) {
    DisplayUtilities.createNamedWindow("warpunwarp", "Warped and Unwarped Image", true);
    logger.debug("Displaying patch");
    float resizeScale = 5f;
    float warppedPatchScale = resizeScale;
    ResizeProcessor patchSizer = new ResizeProcessor(resizeScale);
    FImage warppedPatchGrey = warped.process(patchSizer);
    MBFImage warppedPatch =
        new MBFImage(warppedPatchGrey.clone(), warppedPatchGrey.clone(), warppedPatchGrey.clone());
    float x = warpedx * warppedPatchScale;
    float y = warpedy * warppedPatchScale;
    float r = scale * warppedPatchScale;

    warppedPatch.createRenderer().drawShape(new Ellipse(x, y, r, r, 0), RGBColour.RED);
    warppedPatch.createRenderer().drawPoint(new Point2dImpl(x, y), RGBColour.RED, 3);

    FImage unwarppedPatchGrey = unwarped.clone();
    MBFImage unwarppedPatch =
        new MBFImage(
            unwarppedPatchGrey.clone(), unwarppedPatchGrey.clone(), unwarppedPatchGrey.clone());
    unwarppedPatch = unwarppedPatch.process(patchSizer);
    float unwarppedPatchScale = resizeScale;

    x = unwarpedx * unwarppedPatchScale;
    y = unwarpedy * unwarppedPatchScale;
    //		Matrix sm = state.selected.secondMoments;
    //		float scale = state.selected.scale * unwarppedPatchScale;
    //		Ellipse e = EllipseUtilities.ellipseFromSecondMoments(x, y, sm, scale*2);
    Ellipse e =
        EllipseUtilities.fromTransformMatrix2x2(transform, x, y, scale * unwarppedPatchScale);

    unwarppedPatch.createRenderer().drawShape(e, RGBColour.BLUE);
    unwarppedPatch.createRenderer().drawPoint(new Point2dImpl(x, y), RGBColour.RED, 3);
    // give each patch a 5px black border
    warppedPatch = warppedPatch.padding(5, 5, RGBColour.BLACK);
    unwarppedPatch = unwarppedPatch.padding(5, 5, RGBColour.BLACK);

    MBFImage displayArea =
        warppedPatch.newInstance(warppedPatch.getWidth() * 2, warppedPatch.getHeight());
    displayArea.createRenderer().drawImage(warppedPatch, 0, 0);
    displayArea.createRenderer().drawImage(unwarppedPatch, warppedPatch.getWidth(), 0);
    DisplayUtilities.displayName(displayArea, "warpunwarp");
    logger.debug("Done");
  }
  /**
   * An example run
   *
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    float sd = 5;
    float si = 1.4f * sd;
    HessianIPD ipd = new HessianIPD(sd, si);
    FImage img =
        ImageUtilities.readF(
            AffineAdaption.class.getResourceAsStream("/org/openimaj/image/data/sinaface.jpg"));

    //		img = img.multiply(255f);

    //		ipd.findInterestPoints(img);
    //		List<InterestPointData> a = ipd.getInterestPoints(1F/(256*256));
    //
    //		System.out.println("Found " + a.size() + " features");
    //
    //		AffineAdaption adapt = new AffineAdaption();
    //		EllipticKeyPoint kpt = new EllipticKeyPoint();
    MBFImage outImg = new MBFImage(img.clone(), img.clone(), img.clone());
    //		for (InterestPointData d : a) {
    //
    ////			InterestPointData d = new InterestPointData();
    ////			d.x = 102;
    ////			d.y = 396;
    //			logger.info("Keypoint at: " + d.x + ", " + d.y);
    //			kpt.si = si;
    //			kpt.centre = new Pixel(d.x, d.y);
    //			kpt.size = 2 * 3 * kpt.si;
    //
    //			boolean converge = adapt.calcAffineAdaptation(img, kpt);
    //			if(converge)
    //			{
    //				outImg.drawShape(new
    // Ellipse(kpt.centre.x,kpt.centre.y,kpt.axes.getX(),kpt.axes.getY(),kpt.phi), RGBColour.BLUE);
    //				outImg.drawPoint(kpt.centre, RGBColour.RED,3);
    //			}
    //
    //
    //
    //			logger.info("... converged: "+ converge);
    //		}
    AffineAdaption adapt = new AffineAdaption(ipd, new IPDSelectionMode.Count(100));
    adapt.findInterestPoints(img);
    InterestPointVisualiser<Float[], MBFImage> ipv =
        InterestPointVisualiser.visualiseInterestPoints(outImg, adapt.points);
    DisplayUtilities.display(ipv.drawPatches(RGBColour.BLUE, RGBColour.RED));
  }
  /**
   * Test that the windowed histogram produced with {@link HistogramAnalyser} matches the one
   * produced with {@link BinnedWindowedExtractor}.
   */
  @Test
  public void testWindowedHistogram() {
    final Rectangle roi = new Rectangle(50, 10, 100, 100);

    final Histogram hist1 = HistogramAnalyser.getHistogram(image.extractROI(roi), analyser.nbins);
    final Histogram hist2 = analyser.computeHistogram(roi);

    assertArrayEquals(hist1.values, hist2.values, 0.0001);
  }
  /**
   * Extract a rectangular image patch centered on the feature, oriented along its primary
   * orientation and scaled by the given scale factor.
   *
   * @param image the image to extract from
   * @param sf the scale factor
   * @return a rectangular image patch
   */
  public FImage extractEllipsePatch(FImage image, double sf) {
    double[] data = getEllipseBoundingRectsData(sf);
    double height = data[1], width = data[0], ori = data[2];

    int sx = (int) Math.rint(width);
    int sy = (int) Math.rint(height);

    FImage patch = new FImage(sx, sy);

    // extract pixels
    for (int y = 0; y < sy; y++) {
      for (int x = 0; x < sx; x++) {
        // offset of this pixel from the patch centre
        double xbar = x - sx / 2.0;
        double ybar = y - sy / 2.0;

        // rotate by the patch orientation and translate to the feature centre (mx, my)
        double xx = (xbar * Math.cos(ori) - ybar * Math.sin(ori)) + mx;
        double yy = (xbar * Math.sin(ori) + ybar * Math.cos(ori)) + my;

        patch.setPixel(x, y, image.getPixelInterp(xx, yy));
      }
    }

    return patch;
  }
  /**
   * Testing
   *
   * @param args
   * @throws IOException
   */
  public static void main(String[] args) throws IOException {
    FImage image = ImageUtilities.readF(new File("/Users/jsh2/Desktop/image.png"));
    FImage template = image.extractROI(100, 100, 100, 100);
    image.fill(0f);
    image.drawImage(template, 100, 100);

    TemplateMatcher matcher = new TemplateMatcher(template, Mode.CORRELATION);
    matcher.setSearchBounds(new Rectangle(100, 100, 200, 200));
    image.analyseWith(matcher);
    DisplayUtilities.display(matcher.responseMap.normalise());

    MBFImage cimg = image.toRGB();
    for (FValuePixel p : matcher.getBestResponses(10)) {
      System.out.println(p);
      cimg.drawPoint(p, RGBColour.RED, 1);
    }

    cimg.drawShape(matcher.getSearchBounds(), RGBColour.BLUE);
    cimg.drawShape(new Rectangle(100, 100, 100, 100), RGBColour.GREEN);

    DisplayUtilities.display(cimg);
  }
 /**
  * Setup tests
  *
  * @throws IOException
  */
 @Before
 public void setup() throws IOException {
   image = ImageUtilities.readF(OpenIMAJ.getLogoAsStream());
   analyser = new BinnedWindowedExtractor(64);
   image.analyseWith(analyser);
 }
 /**
  * Setup the test data
  *
  * @throws IOException
  */
 @Before
 public void setup() throws IOException {
   image =
       ImageUtilities.readF(getClass().getResourceAsStream("/org/openimaj/image/data/bird.png"));
   template = image.extractROI(100, 100, 100, 100);
 }
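 // Mean-centering gives the template zero mean. For a correlation-style score
 // sum(I * T0) this makes the response invariant to a constant offset added to
 // the image window, since sum((I + c) * T0) = sum(I * T0) + c * sum(T0) and
 // sum(T0) = 0.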
 @Override
 public FImage prepareTemplate(FImage template) {
   return template.process(new MeanCenter());
 }
  /*
   * Selects the differentiation scale
   */
  AbstractStructureTensorIPD selDifferentiationScale(
      FImage img, AbstractStructureTensorIPD ipdToUse, float si, Pixel c) {
    AbstractStructureTensorIPD best = null;
    float s = 0.5f;
    float sigma_prev = 0, sigma;

    FImage L;

    double qMax = 0;

    L = img.clone();

    AbstractStructureTensorIPD ipd = ipdToUse.clone();

    while (s <= 0.751) {
      Matrix M;
      float sd = s * si;

      // Incrementally smooth the previously smoothed image L up to scale sd
      sigma = (float) Math.sqrt(Math.pow(sd, 2) - Math.pow(sigma_prev, 2));

      L.processInplace(new FGaussianConvolve(sigma, 3));
      sigma_prev = sd;

      // X and Y derivatives
      ipd.setDetectionScale(sd);
      ipd.setIntegrationScale(si);
      ipd.setImageBlurred(true);

      ipd.findInterestPoints(L);
      //			FImage Lx = L.process(new FConvolution(DX_KERNEL.multiply(sd)));
      //			FImage Ly = L.process(new FConvolution(DY_KERNEL.multiply(sd)));
      //
      //			FGaussianConvolve gauss = new FGaussianConvolve(si, 3);
      //
      //			FImage Lxm2 = Lx.multiply(Lx);
      //			dx2 = Lxm2.process(gauss);
      //
      //			FImage Lym2 = Ly.multiply(Ly);
      //			dy2 = Lym2.process(gauss);
      //
      //			FImage Lxmy = Lx.multiply(Ly);
      //			dxy = Lxmy.process(gauss);

      M = calcSecondMomentMatrix(ipd, c.x, c.y);

      // calc eigenvalues
      //			EigenvalueDecomposition meig = M.eig();
      EigenValueVectorPair meig = MatrixUtils.symmetricEig2x2(M);
      Matrix eval = meig.getValues();
      double eval1 = Math.abs(eval.get(0, 0));
      double eval2 = Math.abs(eval.get(1, 1));
      double q = Math.min(eval1, eval2) / Math.max(eval1, eval2);

      if (q >= qMax) {
        qMax = q;
        best = ipd.clone();
      }

      s += 0.05;
    }
    return best;
  }
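  /*
   * Note on the loop above: for candidate differentiation scales sd = s * si,
   * s in {0.50, 0.55, ..., 0.75}, it keeps the scale that maximizes the isotropy
   * measure q = lambda_min(M) / lambda_max(M) of the second moment matrix M at c,
   * i.e. the scale at which the local structure is closest to isotropic.
   */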
  /*
   * Performs affine adaptation
   */
  boolean calcAffineAdaptation(
      final FImage fimage, EllipticInterestPointData kpt, AbstractStructureTensorIPD ipd) {
    //		DisplayUtilities.createNamedWindow("warp", "Warped Image ROI",true);
    Matrix transf = new Matrix(2, 3); // Transformation matrix
    Point2dImpl c = new Point2dImpl(); // Transformed point
    Point2dImpl p = new Point2dImpl(); // Image point

    Matrix U = Matrix.identity(2, 2); // Normalization matrix

    Matrix Mk = U.copy();
    FImage img_roi, warpedImg = new FImage(1, 1);
    float Qinv = 1, q, si = kpt.scale; // sd = 0.75f * si;
    float kptSize = 2 * 3 * kpt.scale;
    boolean divergence = false, convergence = false;
    int i = 0;

    // Coordinates in image
    int py = (int) kpt.y;
    int px = (int) kpt.x;

    // Roi coordinates
    int roix, roiy;

    // Coordinates in the U-transformed frame
    int cx = px;
    int cy = py;
    int cxPr = cx;
    int cyPr = cy;

    float radius = kptSize / 2 * 1.4f;
    float half_width, half_height;

    Rectangle roi;

    // Affine adaptation
    while (i <= 10 && !divergence && !convergence) {
      // Transformation matrix
      MatrixUtils.zero(transf);
      transf.setMatrix(0, 1, 0, 1, U);

      kpt.setTransform(U);

      Rectangle boundingBox = new Rectangle();

      double ac_b2 = U.det();
      boundingBox.width = (float) Math.ceil(U.get(1, 1) / ac_b2 * 3 * si * 1.4);
      boundingBox.height = (float) Math.ceil(U.get(0, 0) / ac_b2 * 3 * si * 1.4);

      // Create window around interest point
      half_width = Math.min((float) Math.min(fimage.width - px - 1, px), boundingBox.width);
      half_height = Math.min((float) Math.min(fimage.height - py - 1, py), boundingBox.height);

      if (half_width <= 0 || half_height <= 0) return divergence;

      roix = Math.max(px - (int) boundingBox.width, 0);
      roiy = Math.max(py - (int) boundingBox.height, 0);
      roi = new Rectangle(roix, roiy, px - roix + half_width + 1, py - roiy + half_height + 1);

      // create ROI
      img_roi = fimage.extractROI(roi);

      // Point within the ROI
      p.x = px - roix;
      p.y = py - roiy;

      // Find the ROI corner coordinates to get the size of the warped ellipse's bounding box
      float u00 = (float) U.get(0, 0);
      float u01 = (float) U.get(0, 1);
      float u10 = (float) U.get(1, 0);
      float u11 = (float) U.get(1, 1);

      float minx = Math.min(u01 * img_roi.height, 0f);
      float miny = Math.min(u10 * img_roi.width, 0f);
      float maxx =
          Math.max(u00 * img_roi.width, u00 * img_roi.width + u01 * img_roi.height) - minx;
      float maxy =
          Math.max(u11 * img_roi.height, u10 * img_roi.width + u11 * img_roi.height) - miny;

      // Shift
      transf.set(0, 2, -minx);
      transf.set(1, 2, -miny);

      if (maxx >= 2 * radius + 1 && maxy >= 2 * radius + 1) {
        // Size of normalized window must be 2*radius
        // Transformation
        FImage warpedImgRoi;
        FProjectionProcessor proc = new FProjectionProcessor();
        proc.setMatrix(transf);
        img_roi.accumulateWith(proc);
        warpedImgRoi = proc.performProjection(0, (int) maxx, 0, (int) maxy, null);

        //				DisplayUtilities.displayName(warpedImgRoi.clone().normalise(), "warp");

        // Point in U-Normalized coordinates
        c = p.transform(U);
        cx = (int) (c.x - minx);
        cy = (int) (c.y - miny);

        if (warpedImgRoi.height > 2 * radius + 1 && warpedImgRoi.width > 2 * radius + 1) {
          // Cut around normalized patch
          roix = (int) Math.max(cx - Math.ceil(radius), 0.0);
          roiy = (int) Math.max(cy - Math.ceil(radius), 0.0);
          roi =
              new Rectangle(
                  roix,
                  roiy,
                  cx - roix + (float) Math.min(Math.ceil(radius), warpedImgRoi.width - cx - 1) + 1,
                  cy
                      - roiy
                      + (float) Math.min(Math.ceil(radius), warpedImgRoi.height - cy - 1)
                      + 1);
          warpedImg = warpedImgRoi.extractROI(roi);

          // Coordinates in the cropped ROI
          cx = cx - roix;
          cy = cy - roiy;
        } else {
          warpedImg.internalAssign(warpedImgRoi);
        }

        if (logger.getLevel() == Level.DEBUG) {
          displayCurrentPatch(
              img_roi.clone().normalise(),
              p.x,
              p.y,
              warpedImg.clone().normalise(),
              cx,
              cy,
              U,
              si * 3);
        }

        // Integration Scale selection
        si = selIntegrationScale(warpedImg, si, new Pixel(cx, cy));

        // Differentiation scale selection
        if (fastDifferentiationScale) {
          ipd = selDifferentiationScaleFast(warpedImg, ipd, si, new Pixel(cx, cy));
        } else {
          ipd = selDifferentiationScale(warpedImg, ipd, si, new Pixel(cx, cy));
        }

        if (ipd.maxima.size() == 0) {
          divergence = true;
          continue;
        }
        // Spatial Localization
        cxPr = cx; // Previous iteration point in normalized window
        cyPr = cy;
        //
        //				float cornMax = 0;
        //				for (int j = 0; j < 3; j++)
        //				{
        //					for (int t = 0; t < 3; t++)
        //					{
        //						float dx2 = Lxm2smooth.pixels[cyPr - 1 + j][cxPr - 1 + t];
        //						float dy2 = Lym2smooth.pixels[cyPr - 1 + j][cxPr - 1 + t];
        //						float dxy = Lxmysmooth.pixels[cyPr - 1 + j][cxPr - 1 + t];
        //						float det = dx2 * dy2 - dxy * dxy;
        //						float tr = dx2 + dy2;
        //						float cornerness = (float) (det - (0.04 * Math.pow(tr, 2)));
        //
        //						if (cornerness > cornMax) {
        //							cornMax = cornerness;
        //							cx = cxPr - 1 + t;
        //							cy = cyPr - 1 + j;
        //						}
        //					}
        //				}

        FValuePixel max = ipd.findMaximum(new Rectangle(cxPr - 1, cyPr - 1, 3, 3));
        cx = max.x;
        cy = max.y;

        // Reset p to the current location in image coordinates
        p.x = px;
        p.y = py;

        // Displacement vector
        c.x = cx - cxPr;
        c.y = cy - cyPr;

        // New interest point location in image
        p.translate(c.transform(U.inverse()));
        px = (int) p.x;
        py = (int) p.y;

        q = calcSecondMomentSqrt(ipd, new Pixel(cx, cy), Mk);

        float ratio = 1 - q;

        // ratio == 1 means q == 0, i.e. one of the axes is zero
        if (!Float.isNaN(ratio) && ratio != 1) {
          // Update U matrix
          U = U.times(Mk);

          Matrix uVal, uV;
          //					EigenvalueDecomposition ueig = U.eig();
          EigenValueVectorPair ueig = MatrixUtils.symmetricEig2x2(U);
          uVal = ueig.getValues();
          uV = ueig.getVectors();

          Qinv = normMaxEval(U, uVal, uV);

          // Keypoint doesn't converge
          if (Qinv >= 6) {
            logger.debug("QInverse too large, feature too edge like, affine divergence!");
            divergence = true;
          } else if (ratio <= 0.05) { // Keypoint converges
            convergence = true;

            // Set transformation matrix
            MatrixUtils.zero(transf);
            transf.setMatrix(0, 1, 0, 1, U);
            // The order here matters: setTransform uses x and y to calculate a new ellipse
            kpt.x = px;
            kpt.y = py;
            kpt.scale = si;
            kpt.setTransform(U);
            kpt.score = max.value;

            //						ax1 = (float) (1 / Math.abs(uVal.get(1, 1)) * 3 * si);
            //						ax2 = (float) (1 / Math.abs(uVal.get(0, 0)) * 3 * si);
            //						phi = Math.atan(uV.get(1, 1) / uV.get(0, 1));
            //						kpt.axes = new Point2dImpl(ax1, ax2);
            //						kpt.phi = phi;
            //						kpt.centre = new Pixel(px, py);
            //						kpt.si = si;
            //						kpt.size = 2 * 3 * si;

          } else {
            radius = (float) (3 * si * 1.4);
          }
        } else {
          logger.debug("QRatio was close to 0, affine divergence!");
          divergence = true;
        }
      } else {
        logger.debug("Window size has grown too fast, scale divergence!");
        divergence = true;
      }

      ++i;
    }
    if (!divergence && !convergence) {
      logger.debug("Reached max iterations!");
    }
    return convergence;
  }
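  /*
   * Sketch of the iteration above (as implemented here): each pass updates the
   * normalization matrix U by the matrix Mk obtained from calcSecondMomentSqrt
   * (derived from the second moment matrix at the current point), re-warps the
   * patch, and re-estimates the integration scale, differentiation scale and
   * point location in the normalized frame. Convergence is declared when the
   * eigenvalue ratio q approaches 1 (ratio = 1 - q <= 0.05); divergence when
   * Qinv >= 6 (too edge-like), when no maxima are found, or when the window
   * grows too fast.
   */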
 @Override
 public void findInterestPoints(FImage image) {
   findInterestPoints(image, image.getBounds());
 }