private void displayMarkerImage(Mat srcImgMat, Mat destImageMat) {
   // Locate the square segment of the destination image reserved for the
   // marker, then overwrite exactly that region with the source image.
   Rect segment = calculateImageSegmentArea(destImageMat);
   Mat target = destImageMat.submat(segment);
   srcImgMat.copyTo(target);
 }
示例#2
0
  /**
   * Detects ORB keypoints and descriptors for both the scene image and the
   * template, then brute-force matches the descriptors using Hamming
   * distance. Results are stored in the keyPointImg, descImg and matches
   * fields; the match count is logged.
   */
  public void performMatch() {
    // ORB produces binary descriptors, hence the Hamming matcher below.
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // Keypoints for the scene image.
    keyPointImg = new MatOfKeyPoint();
    detector.detect(imgGray, keyPointImg);

    // Keypoints for the template.
    MatOfKeyPoint templateKeyPoints = new MatOfKeyPoint();
    detector.detect(templGray, templateKeyPoints);

    // Descriptors for both keypoint sets.
    descImg = new Mat(image.size(), image.type());
    extractor.compute(imgGray, keyPointImg, descImg);

    Mat templateDescriptors = new Mat(template.size(), template.type());
    extractor.compute(templGray, templateKeyPoints, templateDescriptors);

    // Brute-force Hamming matching between scene and template descriptors.
    matches = new MatOfDMatch();
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    matcher.match(descImg, templateDescriptors, matches);

    Log.i("perform match result", matches.size().toString());
  }
示例#3
0
  /**
   * Renders the given frame to the SurfaceView, centred on the canvas.
   * Partly from OpenCV CameraBridgeViewBase.java.
   *
   * @param modified the frame to draw; a null frame is ignored
   */
  void drawFrame(Mat modified) {
    // BUG FIX: the original sized mCacheBitmap from `modified` BEFORE its
    // null check, so a null frame crashed with an NPE. Bail out up front.
    if (modified == null) {
      return;
    }
    if (mCacheBitmap == null) {
      mCacheBitmap =
          Bitmap.createBitmap(modified.width(), modified.height(), Bitmap.Config.ARGB_8888);
    }
    boolean bmpValid = true;
    try {
      Utils.matToBitmap(modified, mCacheBitmap);
    } catch (Exception e) {
      Log.e(TAG, "Mat type: " + modified);
      Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
      Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
      bmpValid = false;
    }
    // mCacheBitmap is guaranteed non-null here, so only bmpValid matters.
    if (bmpValid) {
      Canvas canvas = view.getHolder().lockCanvas();
      if (canvas != null) {
        canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
        // Centre the bitmap on the canvas.
        int left = (canvas.getWidth() - mCacheBitmap.getWidth()) / 2;
        int top = (canvas.getHeight() - mCacheBitmap.getHeight()) / 2;
        canvas.drawBitmap(
            mCacheBitmap,
            new Rect(0, 0, mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
            new Rect(left, top, left + mCacheBitmap.getWidth(), top + mCacheBitmap.getHeight()),
            null);

        view.getHolder().unlockCanvasAndPost(canvas);
      }
    }
  }
  /**
   * Sets the HSV colour to track: recomputes the lower/upper HSV bounds
   * (selected colour +/- mColorRadius per channel) and rebuilds the hue
   * spectrum strip (mSpectrum) used for display.
   *
   * @param hsvColor the selected colour in HSV space (4-element Scalar)
   */
  public void setHsvColor(Scalar hsvColor) {
    // Clamp the hue window to [0, 255] around the selected hue.
    double minH =
        (hsvColor.val[0] >= mColorRadius.val[0]) ? hsvColor.val[0] - mColorRadius.val[0] : 0;
    double maxH =
        (hsvColor.val[0] + mColorRadius.val[0] <= 255)
            ? hsvColor.val[0] + mColorRadius.val[0]
            : 255;

    mLowerBound.val[0] = minH;
    mUpperBound.val[0] = maxH;

    // Saturation and value windows are not clamped, unlike the hue window.
    mLowerBound.val[1] = hsvColor.val[1] - mColorRadius.val[1];
    mUpperBound.val[1] = hsvColor.val[1] + mColorRadius.val[1];

    mLowerBound.val[2] = hsvColor.val[2] - mColorRadius.val[2];
    mUpperBound.val[2] = hsvColor.val[2] + mColorRadius.val[2];

    // Alpha spans the full range.
    mLowerBound.val[3] = 0;
    mUpperBound.val[3] = 255;

    // One-row strip sweeping the hue window at full saturation/value.
    // NOTE(review): if maxH == minH this creates a zero-width Mat — confirm
    // cvtColor tolerates an empty input on this code path.
    Mat spectrumHsv = new Mat(1, (int) (maxH - minH), CvType.CV_8UC3);

    for (int j = 0; j < maxH - minH; j++) {
      byte[] tmp = {(byte) (minH + j), (byte) 255, (byte) 255};
      spectrumHsv.put(0, j, tmp);
    }

    // Convert the HSV strip to 4-channel RGB for on-screen display.
    Imgproc.cvtColor(spectrumHsv, mSpectrum, Imgproc.COLOR_HSV2RGB_FULL, 4);
  }
示例#5
0
 // javadoc: buildOpticalFlowPyramid(img, pyramid, winSize, maxLevel, withDerivatives, pyrBorder,
 // derivBorder, tryReuseInputImage)
 //
 // Auto-generated JNI wrapper: forwards all arguments to the native
 // buildOpticalFlowPyramid_0 implementation, then unpacks the packed native
 // result into the caller's pyramid list. Returns the native call's result
 // (per OpenCV, the number of levels in the constructed pyramid).
 public static int buildOpticalFlowPyramid(
     Mat img,
     List<Mat> pyramid,
     Size winSize,
     int maxLevel,
     boolean withDerivatives,
     int pyrBorder,
     int derivBorder,
     boolean tryReuseInputImage) {
   // Temporary container the native side fills with all pyramid levels.
   Mat pyramid_mat = new Mat();
   int retVal =
       buildOpticalFlowPyramid_0(
           img.nativeObj,
           pyramid_mat.nativeObj,
           winSize.width,
           winSize.height,
           maxLevel,
           withDerivatives,
           pyrBorder,
           derivBorder,
           tryReuseInputImage);
   // Split the packed result into individual Mats, then release the
   // temporary container to free native memory promptly.
   Converters.Mat_to_vector_Mat(pyramid_mat, pyramid);
   pyramid_mat.release();
   return retVal;
 }
  /**
   * Runs every configured cascade classifier over the image and collects all
   * detections, with each bounding box normalised to [0, 1] coordinates.
   *
   * @param bImage the image to scan
   * @return the detected objects; empty if the image could not be converted
   */
  public List<ArtifactDetectedObject> detectObjects(BufferedImage bImage) {
    List<ArtifactDetectedObject> results = new ArrayList<>();
    Mat mat = OpenCVUtils.bufferedImageToMat(bImage);
    if (mat == null) {
      return results;
    }
    double imageWidth = mat.width();
    double imageHeight = mat.height();
    MatOfRect detections = new MatOfRect();
    for (CascadeClassifierHolder holder : objectClassifiers) {
      // detectMultiScale overwrites `detections` for each classifier.
      holder.cascadeClassifier.detectMultiScale(mat, detections);

      for (Rect box : detections.toArray()) {
        // Normalise pixel coordinates to fractions of the image size.
        results.add(
            new ArtifactDetectedObject(
                box.x / imageWidth,
                box.y / imageHeight,
                (box.x + box.width) / imageWidth,
                (box.y + box.height) / imageHeight,
                holder.conceptIRI,
                PROCESS));
      }
    }
    return results;
  }
示例#7
0
  /**
   * Rotates the given rotation (as a Rodrigues rotation vector, in place) by
   * 90 degrees around the X axis.
   *
   * @param rotation 3x1 rotation vector; overwritten with the rotated result
   */
  protected static void rotateXAxis(Mat rotation) {
    // Expand the rotation vector into its 3x3 matrix form.
    Mat rotMat = new Mat(3, 3, CvType.CV_64FC1);
    Calib3d.Rodrigues(rotation, rotMat);

    // 90-degree rotation around X:
    //   | 1   0    0 |
    //   | 0  cos -sin |   with cos(90) = 0, sin(90) = 1
    //   | 0  sin  cos |
    double[] xRot = {
      1, 0, 0,
      0, 0, -1,
      0, 1, 0
    };

    double[] current = new double[9];
    rotMat.get(0, 0, current);

    // product = current * xRot (row-major 3x3 multiply).
    double[] product = new double[9];
    for (int row = 0; row < 3; row++) {
      for (int col = 0; col < 3; col++) {
        double sum = 0;
        for (int k = 0; k < 3; k++) {
          sum += current[3 * row + k] * xRot[3 * k + col];
        }
        product[3 * row + col] = sum;
      }
    }

    // Collapse the product back into a rotation vector with Rodrigues.
    Mat productMat = new Mat(3, 3, CvType.CV_64FC1);
    productMat.put(0, 0, product);
    Calib3d.Rodrigues(productMat, rotation);
  }
示例#8
0
  /** Demonstrates the Bhattacharyya distance on two small sample matrices. */
  public static void bhadis() {

    // Sample data: two 6x4 float matrices, filled via instance initialisers.
    Mat a =
        new Mat(6, 4, CvType.CV_32F) {
          {
            put(0, 0, 0, 1, 1, 5);
            put(1, 0, 1, 0, 2, 4);
            put(2, 0, 2, 2, 3, 6);
            put(3, 0, 0, 4, 6, 1);
            put(4, 0, 5, 4, 6, 1);
            put(5, 0, 4, 2, 8, 1);
          }
        };

    Mat b =
        new Mat(6, 4, CvType.CV_32F) {
          {
            put(0, 0, 4, 9, 2, 1);
            put(1, 0, 9, 6, 0, 3);
            put(2, 0, 2, 0, 8, 6);
            put(3, 0, 1, 3, 6, 9);
            put(4, 0, 5, 7, 6, 1);
            put(5, 0, 4, 2, 8, 1);
          }
        };

    System.out.println(a.dump());
    System.out.println(b.dump());

    // BUG FIX: the distance was computed as Bhattacharyya(b, b) — the
    // self-distance — leaving `a` unused. Compare the two matrices instead.
    double d = Distance.Bhattacharyya(a, b);

    System.out.println("Bhattacharyya distance = " + d);
  }
  /**
   * Finds and extracts all contours in the given Mat. Optionally also removes contours with areas
   * below that of MIN_CONTOUR_AREA.
   *
   * @param mask A mask of all resistors in the image
   * @param originalImage The original image from which the mask was created
   * @param thresholdByArea If true, remove contours below threshold
   * @return The list of found contours
   */
  private List<MatOfPoint> getContours(Mat mask, Mat originalImage, boolean thresholdByArea) {
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();
    // External contours only, with compressed point representation.
    Imgproc.findContours(
        mask,
        contours,
        hierarchy,
        Imgproc.RETR_EXTERNAL,
        Imgproc.CHAIN_APPROX_SIMPLE,
        new Point(0, 0));

    // remove any remaining noise by only keeping contours which area > threshold
    // NOTE(review): the upper bound 6000 is a magic number — consider naming
    // it alongside MIN_CONTOUR_AREA. Also, removal leaves `hierarchy` indexed
    // by the ORIGINAL contour list; it is only used below with maxLevel 0, so
    // the mismatch appears harmless here — confirm.
    if (thresholdByArea) {
      for (int i = 0; i < contours.size(); i++) {
        double area = Imgproc.contourArea(contours.get(i));
        if (area < MIN_CONTOUR_AREA || area > 6000) {
          contours.remove(i);
          i--; // stay on this index: the next element shifted into slot i
        }
      }
    }

    // Draw the surviving contours (white, thickness 4) for visual debugging.
    Mat drawing = Mat.zeros(originalImage.size(), CvType.CV_8U);

    for (int i = 0; i < contours.size(); i++) {
      Scalar color = new Scalar(255, 255, 255);
      Imgproc.drawContours(drawing, contours, i, color, 4, 8, hierarchy, 0, new Point());
    }
    paintBR(drawing);

    return contours;
  }
 /**
  * Searches the given (thresholded) image for a dtouch marker and, when one
  * is found, draws its code text and contour onto the mRgba frame.
  *
  * @param imgMat thresholded image to search for markers
  * @param contourColor colour for the marker contour outline
  * @param codesColor colour for the marker code text
  */
 private void displayMarkersDebug(Mat imgMat, Scalar contourColor, Scalar codesColor) {
   ArrayList<MatOfPoint> components = new ArrayList<MatOfPoint>();
   Mat hierarchy = new Mat();
   DtouchMarker marker = new DtouchMarker();
   boolean markerFound = findMarkers(imgMat, marker, components, hierarchy);
   if (markerFound) {
     String code = codeArrayToString(marker.getCode());
     Point codeLocation = new Point(imgMat.cols() / 4, imgMat.rows() / 8);
     Core.putText(mRgba, code, codeLocation, Core.FONT_HERSHEY_COMPLEX, 1, codesColor, 3);
     Imgproc.drawContours(
         mRgba,
         components,
         marker.getComponentIndex(),
         contourColor,
         3,
         8,
         hierarchy,
         2,
         new Point(0, 0));
   }
   // Free contour storage. The locals can never be null here, so the original
   // null checks and the trailing `= null` assignments were dead code.
   components.clear();
   hierarchy.release();
 }
示例#11
0
 /**
  * Per-frame callback: when a homography is available, warps the (possibly
  * relabelled) logo onto a clone of the camera frame.
  *
  * @param inputFrame current camera frame
  * @return the frame with the logo warped in, or a plain clone of the frame
  *     when no homography is available
  */
 public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
   frame = inputFrame.rgba();
   outputFrame = frame.clone();
   if (homography != null) {
     // Redraw the logo overlay only when the detected marker value changed.
     if (markerValue != -1 && markerValue != oldMarkerValue) {
       oldMarkerValue = markerValue;
       outputLogo = logo.clone();
       // Stamp the marker value onto the logo in red.
       Core.putText(
           outputLogo,
           Integer.toString(markerValue),
           new Point(5, 505),
           Core.FONT_HERSHEY_SIMPLEX,
           5,
           new Scalar(255, 0, 0),
           5);
     }
     // NOTE(review): if no marker was ever seen (markerValue still -1),
     // outputLogo may be uninitialised here — confirm the field has a
     // non-null default before warpPerspective is reached.
     Imgproc.warpPerspective(
         outputLogo,
         outputFrame,
         homography,
         new Size(WIDTH, HEIGHT),
         Imgproc.INTER_NEAREST,
         Imgproc.BORDER_TRANSPARENT,
         new Scalar(0));
   }
   return outputFrame;
 }
  /**
   * Returns square area for image segment.
   *
   * @param imgMat Source image from which to compute image segment.
   * @return square area which contains image segment.
   */
  private Rect calculateImageSegmentArea(Mat imgMat) {
    int width = imgMat.cols();
    int height = imgMat.rows();

    // Start from a half-size segment in each dimension.
    int segmentWidth = width / 2;
    int segmentHeight = height / 2;

    // Input dimensions vary, so equalise the segment's sides to keep the
    // returned region square, shrinking towards the smaller half-dimension.
    if (width > height) {
      // Landscape: cap the segment height at the (smaller) half-width.
      if (height > segmentWidth) {
        segmentHeight = segmentWidth;
      }
    } else if (height > width) {
      // Portrait: cap the segment width at the (smaller) half-height.
      if (width > segmentHeight) {
        segmentWidth = segmentHeight;
      }
    }

    // Centre the segment within the source image.
    return new Rect(
        (width - segmentWidth) / 2, (height - segmentHeight) / 2, segmentWidth, segmentHeight);
  }
示例#13
0
  /**
   * Helper function to convert a Mat into a BufferedImage. Taken from
   * http://answers.opencv.org/question/10344/opencv-java-load-image-to-gui Author: 'Lucky Luke'
   *
   * @param matrix Mat of type CV_8UC3 or CV_8UC1
   * @return BufferedImage of type TYPE_3BYTE_BGR or TYPE_BYTE_GRAY, or null
   *     for unsupported channel counts
   */
  private static BufferedImage matToBufferedImage(Mat matrix) {
    int cols = matrix.cols();
    int rows = matrix.rows();
    int elemSize = (int) matrix.elemSize();
    byte[] pixels = new byte[cols * rows * elemSize];
    matrix.get(0, 0, pixels);

    int imageType;
    int channels = matrix.channels();
    if (channels == 1) {
      imageType = BufferedImage.TYPE_BYTE_GRAY;
    } else if (channels == 3) {
      imageType = BufferedImage.TYPE_3BYTE_BGR;
      // bgr to rgb: swap the blue and red samples of every pixel in place.
      for (int i = 0; i < pixels.length; i = i + 3) {
        byte blue = pixels[i];
        pixels[i] = pixels[i + 2];
        pixels[i + 2] = blue;
      }
    } else {
      // Unsupported channel count.
      return null;
    }

    BufferedImage image = new BufferedImage(cols, rows, imageType);
    image.getRaster().setDataElements(0, 0, cols, rows, pixels);

    return image;
  }
  /**
   * Debug variant of the marker-detection frame handler: thresholds the
   * central image segment, copies the thresholded view back into the colour
   * frame and overlays the marker debug drawing plus the segment rectangle.
   *
   * @param capture the camera capture to read the current frame from
   */
  private void processFrameForMarkersDebug(VideoCapture capture) {
    // Get original image.
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    // Get gray scale image.
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    // Get image segment to detect marker.
    Mat imgSegmentMat = cloneMarkerImageSegment(mGray);
    Mat thresholdedImgMat = new Mat(imgSegmentMat.size(), imgSegmentMat.type());
    applyThresholdOnImage(imgSegmentMat, thresholdedImgMat);
    copyThresholdedImageToRgbImgMat(thresholdedImgMat, mRgba);

    Scalar contourColor = new Scalar(0, 0, 255);
    Scalar codesColor = new Scalar(255, 0, 0, 255);

    displayMarkersDebug(thresholdedImgMat, contourColor, codesColor);
    // displayThresholds(mRgba, codesColor, localThresholds);
    displayRectOnImageSegment(mRgba, false);

    // Removed the dead `components` list and `hierarchy` Mat: they were
    // allocated, never passed to any call, then manually cleared and nulled.
  }
示例#15
0
  /**
   * Order the detected bands into band 1, band 2 and the multiplier. The farthest band from the
   * center of the resistor Mat will always be band 1.
   *
   * <p>Band contours are created from top left to bottom right of the image. Therefore unless
   * resistor is perfectly horizontal, the middle band will always be 2nd, and the order just needs
   * to be flipped if necessary
   */
  public void orderBands() {

    // Calculate the center of the resistor image.
    Point resistorImageCenter = new Point(resistorMat.cols() / 2, resistorMat.rows() / 2);

    double farthestDist = 0;
    int band1Idx = 0;

    // find the band farthest from the center.
    for (int i = 0; i < bands.size(); i++) {
      ColourBand cb = bands.get(i);
      double dist = euclidDist(resistorImageCenter, cb.center);
      if (dist > farthestDist) {
        farthestDist = dist;
        band1Idx = i;
      }
    }

    // assume bands are already in the correct order band1Idx == 0
    // NOTE(review): this aliases `bands` rather than copying it, so the
    // reverse below also mutates `bands` — confirm that is intended.
    orderedBands = bands;

    // if bands are in reverse order, reverse the array to get the ordered bands
    // NOTE(review): only band1Idx == 2 triggers the flip, which assumes
    // exactly three detected bands; a farthest band at index 1 is ignored.
    if (band1Idx == 2) {
      java.util.Collections.reverse(orderedBands);
    }
  }
  /**
   * Converts a single HSV colour to RGBA by routing it through a 1x1 Mat.
   *
   * @param hsvColor the colour to convert, in HSV space
   * @return the same colour as an RGBA Scalar
   */
  private Scalar converScalarHsv2Rgba(Scalar hsvColor) {
    Mat hsvPixel = new Mat(1, 1, CvType.CV_8UC3, hsvColor);
    Mat rgbaPixel = new Mat();
    Imgproc.cvtColor(hsvPixel, rgbaPixel, Imgproc.COLOR_HSV2RGB_FULL, 4);
    return new Scalar(rgbaPixel.get(0, 0));
  }
示例#17
0
  /**
   * Loads a test image, blurs and adaptive-thresholds it, converts the result
   * to RGB working copies for line detection, and writes the thresholded
   * image to disk.
   */
  public static void main(String[] args) {
    try {
      System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

      // Load as grayscale (flag 0), blur heavily, threshold, then invert.
      Mat source = Imgcodecs.imread("test_image.jpg", 0);
      Imgproc.GaussianBlur(source, source, new Size(45, 45), 0);
      Imgproc.adaptiveThreshold(
          source, source, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 75, 10);
      Core.bitwise_not(source, source);

      // Line detection
      // BUG FIX: cvtColor requires a real destination Mat; passing null threw
      // a NullPointerException in the Java binding before any conversion ran.
      Mat img2 = new Mat();
      Imgproc.cvtColor(source, img2, Imgproc.COLOR_GRAY2RGB);

      Mat img3 = new Mat();
      Imgproc.cvtColor(source, img3, Imgproc.COLOR_GRAY2RGB);

      MatOfInt4 lines = new MatOfInt4();
      // Imgproc.HoughLines(img, lines, rho, theta, threshold);

      // Write to File
      Imgcodecs.imwrite("gaussian.jpg", source);
      System.out.println("Success!");
    } catch (Exception e) {
      System.out.println("Error has occurred: " + e.getMessage());
    }
  }
示例#18
0
  /**
   * Compares two images with an 8x8 average-hash (aHash): shrink each image
   * to 8x8 grayscale, threshold every pixel against the image mean to build a
   * 64-bit hash, then score similarity as 1 minus the normalised Hamming
   * distance.
   *
   * @param sourceImagePath path of the first image
   * @param targetImagePath path of the second image
   * @return similarity in [0, 1]; 1.0 means identical hashes
   */
  private static double perceptualHash(String sourceImagePath, String targetImagePath) {

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    Mat matSrc = Imgcodecs.imread(sourceImagePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat matTrgt = Imgcodecs.imread(targetImagePath, Imgcodecs.CV_LOAD_IMAGE_COLOR);
    Mat matSrcResize = new Mat();
    Mat matTrgtResize = new Mat();
    // NOTE(review): these two resizes keep the original dimensions, so they
    // are effectively copies — presumably left over from earlier tuning.
    Imgproc.resize(
        matSrc,
        matSrcResize,
        new Size(matSrc.width(), matSrc.height()),
        0,
        0,
        Imgproc.INTER_NEAREST);
    Imgproc.resize(
        matTrgt,
        matTrgtResize,
        new Size(matTrgt.width(), matTrgt.height()),
        0,
        0,
        Imgproc.INTER_LANCZOS4);
    // Shrink to 8x8 and convert to grayscale: 64 luminance samples per image.
    Mat matSrcDst = new Mat();
    Mat matTrgtDst = new Mat();
    Imgproc.resize(matSrcResize, matSrcDst, new Size(8, 8), 0, 0, Imgproc.INTER_CUBIC);
    Imgproc.resize(matTrgtResize, matTrgtDst, new Size(8, 8), 0, 0, Imgproc.INTER_CUBIC);
    Imgproc.cvtColor(matSrcDst, matSrcDst, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(matTrgtDst, matTrgtDst, Imgproc.COLOR_BGR2GRAY);

    int iAvgSrc = 0, iAvgTrgt = 0;
    int[] arrSrc = new int[64];
    int[] arrTrgt = new int[64];
    for (int i = 0; i < 8; i++) {
      byte[] dataSrc = new byte[8];
      matSrcDst.get(i, 0, dataSrc);
      byte[] dataTrgt = new byte[8];
      matTrgtDst.get(i, 0, dataTrgt);

      int tmp = i * 8;
      for (int j = 0; j < 8; j++) {
        int tmpSrc = tmp + j;
        // BUG FIX: Java bytes are signed, so gray values above 127 became
        // negative and corrupted the averages and the hash. Mask to the
        // unsigned value before quantising down to a multiple of 4.
        arrSrc[tmpSrc] = (dataSrc[j] & 0xFF) / 4 * 4;
        arrTrgt[tmpSrc] = (dataTrgt[j] & 0xFF) / 4 * 4;
        iAvgSrc += arrSrc[tmpSrc];
        iAvgTrgt += arrTrgt[tmpSrc];
      }
    }

    // Threshold each sample against its image's mean to form the 64-bit hash.
    iAvgSrc /= 64;
    iAvgTrgt /= 64;
    for (int i = 0; i < 64; i++) {
      arrSrc[i] = (arrSrc[i] >= iAvgSrc) ? 1 : 0;
      arrTrgt[i] = (arrTrgt[i] >= iAvgTrgt) ? 1 : 0;
    }

    // Hamming distance between the hashes, normalised into a similarity.
    int iDiffNum = 0;
    for (int i = 0; i < 64; i++) {
      if (arrSrc[i] != arrTrgt[i]) {
        ++iDiffNum;
      }
    }

    return 1.0 - (double) iDiffNum / 64;
  }
示例#19
0
 /**
  * Crops the card-number band out of a card image: the horizontal strip from
  * 30/54 to 37/54 of the card height, spanning the full width.
  *
  * @param path file path of the card image
  * @return the sub-image containing the card number
  */
 public static Mat findCardNumber(String path) {
   Mat card = Highgui.imread(path);
   int top = (int) (card.height() * ((double) 30 / 54));
   int bandHeight = (int) (card.height() * ((double) 7 / 54));
   return card.submat(new Rect(0, top, card.cols(), bandHeight));
 }
 /**
  * Callback for when keypoints are available for the other image: converts
  * the Mat to a Bitmap and hands it to the image adapter for display.
  *
  * @param image the keypoint visualisation to show
  */
 @Override
 public void OnKeypointsFoundForOther(Mat image) {
   // Android uses ARGB_8888
   Bitmap keypointBitmap = Bitmap.createBitmap(image.cols(), image.rows(), Bitmap.Config.ARGB_8888);
   Utils.matToBitmap(image, keypointBitmap);
   mImageAdapter.setOtherKeyPointImage(keypointBitmap);
 }
示例#21
0
 /**
  * Demonstrates vector-to-Mat conversion: builds a 3x4 matrix from three
  * copies of the same row and prints it.
  */
 public static void V2M() {
   double[] row = {1.0, 2.0, 3.0, 4.0};
   Vector<double[]> rows = new Vector<double[]>();
   for (int i = 0; i < 3; i++) {
     rows.add(row);
   }
   Mat m = DataConverter.jvector2Mat(rows);
   System.out.println(m.dump());
 }
示例#22
0
  /**
   * Main vision pipeline: filters a clone of the source image, finds polygon
   * targets and, depending on the `stage` field, returns raw, filtered or
   * annotated output.
   *
   * <p>Stages: 0 = return the raw input; 1 = return the filtered mask;
   * 2 = analyse only (no drawing); 3 = draw all targets plus reticle;
   * 4 = draw the best target's bounding box.
   *
   * @param srcImage the current input frame; never modified (clones are used)
   * @return the image corresponding to the configured stage
   */
  public Mat process(Mat srcImage) {
    if (stage == 0) return srcImage;

    List<PolygonCv> targets = new ArrayList<>();
    PolygonCv bestTarget;

    Mat workingImage = srcImage.clone();

    // Filtering chain: colour-space conversion, colour range mask, then
    // morphological cleanup (erode + dilate).
    _ColorSpace.process(workingImage);
    _ColorRange.process(workingImage);
    _Erode.process(workingImage);
    _Dilate.process(workingImage);
    //		_GrayScale.process(workingImage);
    //		_BlackWhite.process(workingImage);

    if (stage == 1) return workingImage;

    // Find targets in the filtered mask, then start over from a fresh clone
    // of the source for drawing the annotated output.
    targets = findTargets(workingImage);
    workingImage = srcImage.clone();

    addTargetingRectangle(workingImage);

    if (targets.size() > 0) {
      bestTarget = findBestTarget(targets);

      if (networkTable != null) {
        targetAnalysis(bestTarget); // no return as it simply writes data to netTables
        networkTable.putNumber("FrameCount", frameCount++);
      }
      // NOTE(review): targetAnalysis is invoked again here with a second
      // argument — confirm both overloaded calls are intentional.
      targetAnalysis(bestTarget, false);

      if (stage == 2) return workingImage; // commandline, so don't bother drawing anything

      if (stage == 3) {
        // Draw every candidate target, highlight the best one, and centre
        // the reticle on the best target's bottom edge.
        _OtherTargets.setPolygon(targets);
        _OtherTargets.process(workingImage);

        _BestTarget.setPolygon(bestTarget);
        _BestTarget.process(workingImage);

        _Reticle.setCenter(bestTarget.getCenterX(), bestTarget.getMaxY());
        _Reticle.setSize(Render.RETICLE_SIZE, Render.RETICLE_SIZE);
        _Reticle.process(workingImage);
      }

      if (stage == 4) {
        // Draw only the best target's bounding box.
        _BoundingBox.setCenter(bestTarget.getCenterX(), bestTarget.getCenterY());
        _BoundingBox.setSize(bestTarget.getHeight() / 2, bestTarget.getWidth() / 2);
        _BoundingBox.process(workingImage);
      }
    }

    // _CrossHair.process(workingImage);

    return workingImage;
  }
示例#23
0
 /**
  * Builds the small-to-glass homography matrix and writes its normalised
  * inverse into the caller-supplied array.
  *
  * @param hSmallToGlass 9-element row-major homography (small to glass)
  * @param hGlassToSmall output: 9-element row-major inverse, scaled so its
  *     bottom-right element equals 1
  * @return the small-to-glass homography as a Mat
  */
 Mat setupMatrix(double[] hSmallToGlass, double[] hGlassToSmall) {
   Mat hSmallToGlassMat = HMatFromArray(hSmallToGlass);
   Mat hGlassToSmallMat = hSmallToGlassMat.inv();
   // Normalise so the homography's scale element (2,2) becomes 1.
   // NOTE(review): assumes the matrix is invertible and element (2,2) is
   // non-zero — confirm upstream guarantees this.
   double denominator = hGlassToSmallMat.get(2, 2)[0];
   for (int i = 0; i < 3; i++) {
     for (int j = 0; j < 3; j++) {
       hGlassToSmall[i * 3 + j] = hGlassToSmallMat.get(i, j)[0] / denominator;
     }
   }
   return hSmallToGlassMat;
 }
示例#24
0
 /**
  * Dumps every element of an integer Mat to the log, one line per position.
  *
  * @param mat the matrix to print; read with int-channel accessors
  */
 private static void printMatI(Mat mat) {
   int[] pixel = new int[mat.channels()];
   for (int row = 0; row < mat.rows(); row++) {
     for (int col = 0; col < mat.cols(); col++) {
       mat.get(row, col, pixel);
       log(lvl, "(%d, %d) %s", row, col, Arrays.toString(pixel));
     }
   }
 }
示例#25
0
  /**
   * Creates a tracker for the given image and precomputes its grayscale
   * version for later feature detection.
   *
   * @param image source image; converted here with COLOR_BGRA2GRAY, so it is
   *     expected in BGRA channel order
   */
  public MarkerTracker(Mat image) {
    this.image = image;

    imgGray = new Mat(image.size(), image.type());
    // Convert them to grayscale
    Imgproc.cvtColor(image, imgGray, Imgproc.COLOR_BGRA2GRAY);
    //  Core.normalize(imgGray, imgGray, 0, 255, Core.NORM_MINMAX);

  }
  // Verifies that a descriptor Mat added to the matcher is returned unchanged
  // by getTrainDescriptors(). Relies on the enclosing fixture's `matcher`
  // field and its assertMatEqual helper.
  public void testGetTrainDescriptors() {
    // Clone before add() so the expected value cannot share state with the
    // Mat handed to the matcher.
    Mat train = new Mat(1, 1, CvType.CV_8U, new Scalar(123));
    Mat truth = train.clone();
    matcher.add(Arrays.asList(train));

    List<Mat> descriptors = matcher.getTrainDescriptors();

    assertEquals(1, descriptors.size());
    assertMatEqual(truth, descriptors.get(0));
  }
示例#27
0
  /**
   * Rotate a Mat by the specified degree
   *
   * @param src The source Mat
   * @param angle The angle by which to rotate by
   * @return The rotated Mat
   */
  public static Mat rotate(Mat src, double angle) {
    // Use the larger dimension as the square canvas side so rotated content
    // has room in either orientation.
    int side = Math.max(src.cols(), src.rows());
    Point center = new Point(side / 2.0, side / 2.0);
    Mat rotation = Imgproc.getRotationMatrix2D(center, angle, 1.0);
    Mat rotated = new Mat();

    Imgproc.warpAffine(src, rotated, rotation, new Size(side, side));

    return rotated;
  }
示例#28
0
 // javadoc: buildOpticalFlowPyramid(img, pyramid, winSize, maxLevel)
 //
 // Auto-generated JNI wrapper (short overload): forwards to the native
 // buildOpticalFlowPyramid_1 implementation, then unpacks the packed native
 // result into the caller's pyramid list. Returns the native call's result
 // (per OpenCV, the number of levels in the constructed pyramid).
 public static int buildOpticalFlowPyramid(
     Mat img, List<Mat> pyramid, Size winSize, int maxLevel) {
   // Temporary container the native side fills with all pyramid levels.
   Mat pyramid_mat = new Mat();
   int retVal =
       buildOpticalFlowPyramid_1(
           img.nativeObj, pyramid_mat.nativeObj, winSize.width, winSize.height, maxLevel);
   // Split the packed result into individual Mats, then release the
   // temporary container to free native memory promptly.
   Converters.Mat_to_vector_Mat(pyramid_mat, pyramid);
   pyramid_mat.release();
   return retVal;
 }
  /**
   * Rotates each camera frame by -90 degrees (clockwise, since OpenCV treats
   * positive angles as counter-clockwise) about the frame centre.
   *
   * @param inputFrame the current camera frame
   * @return the rotated frame
   */
  public Mat onCameraFrame(Mat inputFrame) {
    inputFrame.copyTo(mRgba);
    Point center = new Point(mRgba.width() / 2, mRgba.height() / 2);
    double angle = -90;
    double scale = 1.0;

    // NOTE(review): the rotated output lands in mGrayMat despite the name —
    // here it holds the rotated colour frame, not a grayscale image.
    Mat mapMatrix = Imgproc.getRotationMatrix2D(center, angle, scale);
    Imgproc.warpAffine(mRgba, mGrayMat, mapMatrix, mRgba.size(), Imgproc.INTER_LINEAR);
    return mGrayMat;
  }
示例#30
0
 /**
  * Converts a Mat into a BufferedImage by copying its bytes directly into
  * the image's backing buffer.
  *
  * @param m the source Mat
  * @return a grayscale image for single-channel input, otherwise a 3-byte
  *     BGR image
  */
 public static BufferedImage bufferedImage(Mat m) {
   // Single-channel Mats map to grayscale; everything else is treated as BGR.
   int imageType =
       (m.channels() > 1) ? BufferedImage.TYPE_3BYTE_BGR : BufferedImage.TYPE_BYTE_GRAY;
   BufferedImage image = new BufferedImage(m.cols(), m.rows(), imageType);
   // get all the pixels straight into the BufferedImage's byte buffer.
   byte[] target = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
   m.get(0, 0, target);
   return image;
 }