// Lazily creates the cached submat "windows" into mRgba (and mGray) used by the
// per-view-mode processing paths: a centered inner window, a vertical blur
// strip, the zoom destination corner, and the zoom source window.
// NOTE(review): each submat is created only once (guarded by a null check). If
// mRgba is ever reallocated at a different size, these cached headers would
// still reference the old buffer — callers re-invoke this on size change, but
// non-null stale submats are NOT recreated here; verify that is sufficient.
private void CreateAuxiliaryMats() {
    if (mRgba.empty()) return;

    mSizeRgba = mRgba.size();

    int rows = (int) mSizeRgba.height;
    int cols = (int) mSizeRgba.width;

    // Inner window: centered region covering 3/4 of each dimension.
    int left = cols / 8;
    int top = rows / 8;

    int width = cols * 3 / 4;
    int height = rows * 3 / 4;

    if (mRgbaInnerWindow == null)
      mRgbaInnerWindow = mRgba.submat(top, top + height, left, left + width);
    mSizeRgbaInner = mRgbaInnerWindow.size();

    if (mGrayInnerWindow == null && !mGray.empty())
      mGrayInnerWindow = mGray.submat(top, top + height, left, left + width);

    // Full-height strip spanning the middle third of the columns.
    if (mBlurWindow == null) mBlurWindow = mRgba.submat(0, rows, cols / 3, cols * 2 / 3);

    // Top-left corner region into which the magnified zoom image is rendered.
    if (mZoomCorner == null)
      mZoomCorner = mRgba.submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);

    // Centered source window (18% of each dimension) that gets magnified.
    if (mZoomWindow == null)
      mZoomWindow =
          mRgba.submat(
              rows / 2 - 9 * rows / 100,
              rows / 2 + 9 * rows / 100,
              cols / 2 - 9 * cols / 100,
              cols / 2 + 9 * cols / 100);
  }
Example #2
  /**
   * Runs ORB keypoint detection and description on both the scene image and the
   * template, then brute-force matches the two descriptor sets using Hamming
   * distance (appropriate for binary ORB descriptors). Results are stored in
   * the {@code keyPointImg}, {@code descImg} and {@code matches} fields.
   */
  public void performMatch() {
    FeatureDetector detector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor extractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // Detect keypoints in the scene image and in the template.
    keyPointImg = new MatOfKeyPoint();
    detector.detect(imgGray, keyPointImg);

    MatOfKeyPoint templKeyPoints = new MatOfKeyPoint();
    detector.detect(templGray, templKeyPoints);

    // Compute ORB descriptors for both keypoint sets.
    descImg = new Mat(image.size(), image.type());
    extractor.compute(imgGray, keyPointImg, descImg);

    Mat templDescriptors = new Mat(template.size(), template.type());
    extractor.compute(templGray, templKeyPoints, templDescriptors);

    // Match scene descriptors against template descriptors.
    matches = new MatOfDMatch();
    DescriptorMatcher bfMatcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    bfMatcher.match(descImg, templDescriptors, matches);

    Log.i("perform match result", matches.size().toString());
  }
  /**
   * Splits the source image into an N x N grid of tiles and applies adaptive
   * (Gaussian) thresholding to each tile independently, writing each result
   * into the corresponding region of {@code outputImgMat}.
   *
   * @param srcImgMat single-channel source image
   * @param outputImgMat destination image of the same size/type as the source
   * @return one value per tile (currently the fixed placeholder 0x80, since
   *     adaptive thresholding computes its own per-pixel local thresholds)
   */
  private ArrayList<Double> applyThresholdOnImage(Mat srcImgMat, Mat outputImgMat) {
    double localThreshold;
    int startRow;
    int endRow;
    int startCol;
    int endCol;

    ArrayList<Double> localThresholds = new ArrayList<Double>();

    int numberOfTiles = mPreference.getNumberOfTiles();
    // FIX: these locals were previously named the wrong way around (the "width"
    // was computed from the image height and vice versa). Rows are spanned by
    // the tile height; columns by the tile width. Values are unchanged.
    int tileHeight = (int) srcImgMat.size().height / numberOfTiles;
    int tileWidth = (int) srcImgMat.size().width / numberOfTiles;

    // Split image into tiles and apply threshold on each image tile separately.
    for (int tileRowCount = 0; tileRowCount < numberOfTiles; tileRowCount++) {
      startRow = tileRowCount * tileHeight;
      // The last tile in each direction absorbs any remainder pixels.
      if (tileRowCount < numberOfTiles - 1) endRow = (tileRowCount + 1) * tileHeight;
      else endRow = (int) srcImgMat.size().height;

      for (int tileColCount = 0; tileColCount < numberOfTiles; tileColCount++) {
        startCol = tileColCount * tileWidth;
        if (tileColCount < numberOfTiles - 1) endCol = (tileColCount + 1) * tileWidth;
        else endCol = (int) srcImgMat.size().width;

        Mat tileThreshold = new Mat();
        Mat tileMat = srcImgMat.submat(startRow, endRow, startCol, endCol);
        // Adaptive thresholding computes a local threshold per pixel, so the
        // recorded per-tile value is only a placeholder.
        localThreshold = 0x80;
        Imgproc.adaptiveThreshold(
            tileMat,
            tileThreshold,
            255,
            Imgproc.ADAPTIVE_THRESH_GAUSSIAN_C,
            Imgproc.THRESH_BINARY,
            91, // neighborhood block size (must be odd)
            2);
        Mat copyMat = outputImgMat.submat(startRow, endRow, startCol, endCol);
        tileThreshold.copyTo(copyMat);
        // FIX: release the per-tile Mat headers; previously only tileThreshold
        // was released and the submat headers piled up until finalization.
        tileThreshold.release();
        tileMat.release();
        copyMat.release();
        localThresholds.add(localThreshold);
      }
    }

    return localThresholds;
  }
  /**
   * Debug variant of the marker-processing pipeline: grabs the current RGBA and
   * gray frames, thresholds the marker image segment, copies the thresholded
   * segment into the RGB frame, and paints the debug overlay (contours, codes,
   * segment rectangle) onto {@code mRgba}.
   *
   * @param capture camera capture to read the current frame from
   */
  private void processFrameForMarkersDebug(VideoCapture capture) {
    ArrayList<MatOfPoint> components = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    // Get original image.
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    // Get gray scale image.
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    // Get image segment to detect marker.
    Mat imgSegmentMat = cloneMarkerImageSegment(mGray);
    Mat thresholdedImgMat = new Mat(imgSegmentMat.size(), imgSegmentMat.type());
    applyThresholdOnImage(imgSegmentMat, thresholdedImgMat);
    // FIX: release the cloned segment once thresholding is done, matching
    // processFrameForMarkersFull (previously leaked native memory per frame).
    imgSegmentMat.release();
    copyThresholdedImageToRgbImgMat(thresholdedImgMat, mRgba);

    Scalar contourColor = new Scalar(0, 0, 255);
    Scalar codesColor = new Scalar(255, 0, 0, 255);

    displayMarkersDebug(thresholdedImgMat, contourColor, codesColor);
    // displayThresholds(mRgba, codesColor, localThresholds);
    displayRectOnImageSegment(mRgba, false);
    // FIX: release the thresholded segment after its last use above.
    thresholdedImgMat.release();

    if (components != null) components.clear();
    if (hierarchy != null) hierarchy.release();
    components = null;
    hierarchy = null;
  }
Example #5
  /**
   * Finds and extracts all external contours in the given mask. Optionally also
   * removes contours whose area falls outside the accepted range (below
   * MIN_CONTOUR_AREA or above 6000), then renders the surviving contours into a
   * debug image handed to paintBR (presumably the bottom-right GUI pane —
   * verify).
   *
   * @param mask A mask of all resistors in the image
   * @param originalImage The original image from which the mask was created
   * @param thresholdByArea If true, remove contours outside the area range
   * @return The list of found contours
   */
  private List<MatOfPoint> getContours(Mat mask, Mat originalImage, boolean thresholdByArea) {
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(
        mask,
        contours,
        hierarchy,
        Imgproc.RETR_EXTERNAL,
        Imgproc.CHAIN_APPROX_SIMPLE,
        new Point(0, 0));

    // remove any remaining noise by only keeping contours whose area is in range
    if (thresholdByArea) {
      for (int i = 0; i < contours.size(); i++) {
        double area = Imgproc.contourArea(contours.get(i));
        // Upper bound of 6000 rejects blobs too large to be a resistor.
        if (area < MIN_CONTOUR_AREA || area > 6000) {
          // Removing by index: step back so the shifted element is re-checked.
          contours.remove(i);
          i--;
        }
      }
    }

    // Draw the kept contours (white, thickness 4) onto a blank debug canvas.
    Mat drawing = Mat.zeros(originalImage.size(), CvType.CV_8U);

    for (int i = 0; i < contours.size(); i++) {
      Scalar color = new Scalar(255, 255, 255);
      Imgproc.drawContours(drawing, contours, i, color, 4, 8, hierarchy, 0, new Point());
    }
    paintBR(drawing);

    return contours;
  }
Example #6
  /**
   * Creates a tracker for the given scene image and marker template; grayscale
   * working copies of both inputs are prepared for later processing.
   */
  public MarkerTracker(Mat image, Mat template) {
    this.image = image;
    this.template = template;
    Log.i("Marker-Tracker", "image is null?::" + (null == image));

    // Grayscale copy of the scene image.
    imgGray = new Mat(image.size(), image.type());
    Imgproc.cvtColor(image, imgGray, Imgproc.COLOR_BGRA2GRAY);

    // Grayscale copy of the template.
    templGray = new Mat(template.size(), template.type());
    Imgproc.cvtColor(template, templGray, Imgproc.COLOR_BGRA2GRAY);
  }
Example #7
  /**
   * Creates a tracker for the given scene image only; a grayscale working copy
   * is prepared for later processing.
   */
  public MarkerTracker(Mat image) {
    this.image = image;

    // Grayscale copy of the scene image.
    imgGray = new Mat(image.size(), image.type());
    Imgproc.cvtColor(image, imgGray, Imgproc.COLOR_BGRA2GRAY);
  }
Example #8 — File: VisionTest.java, Project: vkee/maslab
 /**
  * Renders the given Mat into the Swing pane, resizing the pane first if its
  * dimensions no longer match the image.
  */
 private static void updateWindow(JLabel imagePane, Mat mat) {
   int width = (int) mat.size().width;
   int height = (int) mat.size().height;
   // Only resize when the pane and image dimensions disagree.
   if (imagePane.getWidth() != width || imagePane.getHeight() != height) {
     imagePane.setSize(width, height);
   }
   imagePane.setIcon(new ImageIcon(Mat2Image.getImage(mat)));
 }
Example #9
  /**
   * Rotates the incoming frame by 90 degrees clockwise (angle = -90; OpenCV
   * treats positive angles as counter-clockwise) about the image center and
   * returns the rotated frame.
   * NOTE(review): the rotated output lands in mGrayMat even though it is the
   * full-color frame, not grayscale — the field name is misleading; verify
   * downstream consumers.
   */
  public Mat onCameraFrame(Mat inputFrame) {
    inputFrame.copyTo(mRgba);
    Point center = new Point(mRgba.width() / 2, mRgba.height() / 2);
    double angle = -90;
    double scale = 1.0;

    Mat mapMatrix = Imgproc.getRotationMatrix2D(center, angle, scale);
    Imgproc.warpAffine(mRgba, mGrayMat, mapMatrix, mRgba.size(), Imgproc.INTER_LINEAR);
    return mGrayMat;
  }
Example #10
  /**
   * Creates Resistor objects for all resistors extracted from given contours. Optionally, also
   * displays a bounding rectangle for all contours in the top left frame of the GUI.
   *
   * @param contours The contours defining the resistors
   * @param image The image from which the contours were extracted
   * @param showBoundingRect If true draws a bounding rectangle for each contour
   * @return A list of Resistor objects
   */
  private List<Resistor> extractResistorsFromContours(
      List<MatOfPoint> contours, Mat image, boolean showBoundingRect) {
    List<Mat> extractedResistors = new ArrayList<Mat>();
    List<Rect> boundingRect = new ArrayList<Rect>();
    List<Resistor> resistors = new ArrayList<Resistor>();

    for (int i = 0; i < contours.size(); i++) {
      // bounding rectangle
      boundingRect.add(Imgproc.boundingRect(contours.get(i)));

      // Mask out everything except the current contour, then crop to its
      // bounding rectangle.
      Mat mask = Mat.zeros(image.size(), CvType.CV_8U);
      Imgproc.drawContours(mask, contours, i, new Scalar(255), Core.FILLED);

      Mat contourRegion;
      Mat imageROI = new Mat();
      image.copyTo(imageROI, mask);
      contourRegion = new Mat(imageROI, boundingRect.get(i));
      extractedResistors.add(contourRegion);

      // FIX: release per-iteration temporaries; previously a full-size mask and
      // ROI leaked per contour until GC finalization. contourRegion keeps a
      // reference to imageROI's pixel data, so releasing the header is safe.
      mask.release();
      imageROI.release();

      // the center of the resistor as a point within the original captured image
      Point resistorCenterPoint = findCenter(contours.get(i));

      // create a new resistor entry
      Resistor r = new Resistor(resistorCenterPoint, contourRegion);
      resistors.add(r);
    }

    if (showBoundingRect) {
      Mat drawing = new Mat();
      image.copyTo(drawing);
      for (int i = 0; i < contours.size(); i++) {
        Core.rectangle(
            drawing, boundingRect.get(i).tl(), boundingRect.get(i).br(), new Scalar(0, 0, 255), 2);
      }
      paintTL(drawing);
    }

    return resistors;
  }
  /**
   * Full marker-processing pipeline: grabs the current RGBA and gray frames,
   * thresholds the marker image segment, searches it for a d-touch marker, and
   * updates the on-screen overlay (plus mMarkerImage) depending on whether a
   * marker was found.
   *
   * @param capture camera capture to read the current frame from
   * @param marker receives the decoded marker data when one is found
   */
  private void processFrameForMarkersFull(VideoCapture capture, DtouchMarker marker) {
    ArrayList<MatOfPoint> components = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    // Get original image.
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    // Get gray scale image.
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
    // Get image segment to detect marker.
    markerPosition = calculateImageSegmentArea(mGray);
    Mat imgSegmentMat = cloneMarkerImageSegment(mGray);
    // apply threshold.
    Mat thresholdedImgMat = new Mat(imgSegmentMat.size(), imgSegmentMat.type());
    applyThresholdOnImage(imgSegmentMat, thresholdedImgMat);
    imgSegmentMat.release();
    // find markers.
    boolean markerFound = findMarkers(thresholdedImgMat, marker, components, hierarchy);
    thresholdedImgMat.release();
    // Marker detected.
    if (markerFound) {
      setMarkerDetected(true);
      // if marker is found then copy the marker image segment.
      mMarkerImage = cloneMarkerImageSegment(mRgba);
      // display codes on the original image.
      // displayMarkerCodes(mRgba, markers);
      // display rect with indication that a marker is identified.
      displayRectOnImageSegment(mRgba, true);
      // display marker image
      displayMarkerImage(mMarkerImage, mRgba);
    } else displayRectOnImageSegment(mRgba, false);

    if (components != null) components.clear();
    if (hierarchy != null) hierarchy.release();
    components = null;
    hierarchy = null;
  }
Example #12 — File: FdView.java, Project: nagyist/XFace
  /**
   * Per-frame face and eye detection pipeline. Detects faces with either the
   * Java or native cascade detector, draws face/eye rectangles, learns eye
   * templates during the first few frames, then template-matches both eyes and
   * renders magnified eye views. Returns the annotated frame as a Bitmap, or
   * null if the Mat-to-Bitmap conversion fails.
   */
  @Override
  protected Bitmap processFrame(VideoCapture capture) {
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      // FIX: a stray ';' after this condition previously made the 'if' an
      // empty statement, so the assignment below ran unconditionally.
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
      mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
      if (mJavaDetector != null)
        mJavaDetector.detectMultiScale(
            mGray,
            faces,
            1.1,
            2,
            2 // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            ,
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
            new Size());

      if (mZoomCorner == null || mZoomWindow == null) CreateAuxiliaryMats();

      Rect[] facesArray = faces.toArray();

      for (int i = 0; i < facesArray.length; i++) {
        Rect r = facesArray[i];
        Core.rectangle(mGray, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
        Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);

        // Band across the upper-middle of the face where the eyes are expected.
        eyearea =
            new Rect(
                r.x + r.width / 8,
                (int) (r.y + (r.height / 4.5)),
                r.width - 2 * r.width / 8,
                (int) (r.height / 3.0));
        Core.rectangle(mRgba, eyearea.tl(), eyearea.br(), new Scalar(255, 0, 0, 255), 2);
        Rect eyearea_right =
            new Rect(
                r.x + r.width / 16,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2,
                (int) (r.height / 3.0));
        Rect eyearea_left =
            new Rect(
                r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2,
                (int) (r.height / 3.0));
        Core.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(255, 0, 0, 255), 2);
        Core.rectangle(
            mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(255, 0, 0, 255), 2);

        if (learn_frames < 5) {
          // Learn the eye templates from the first few frames.
          teplateR = get_template(mCascadeER, eyearea_right, 24);
          teplateL = get_template(mCascadeEL, eyearea_left, 24);
          learn_frames++;
        } else {
          // NOTE(review): the right-eye match result is immediately overwritten
          // by the left-eye result; if both are needed, store them separately.
          match_value = match_eye(eyearea_right, teplateR, FdActivity.method);
          match_value = match_eye(eyearea_left, teplateL, FdActivity.method);
        }
        // Magnified views of both eye regions.
        Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
        Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
      }
    } else if (mDetectorType == NATIVE_DETECTOR) {
      if (mNativeDetector != null) mNativeDetector.detect(mGray, faces);
    } else {
      Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
      Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    try {
      Utils.matToBitmap(mRgba, bmp);
    } catch (Exception e) {
      Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
      bmp.recycle();
      bmp = null;
    }

    return bmp;
  }
Example #13
  /**
   * Determines which pieces are kings by comparing each detected contour's
   * bounding-box height against one board-row height.
   *
   * @param in Mat image of board
   */
  public void determineKings(Mat in) {
    int playSquares = 32;

    // Work on a grayscale copy of the board image.
    Mat dst = new Mat(in.rows(), in.cols(), in.type());
    in.copyTo(dst);

    Imgproc.cvtColor(dst, dst, Imgproc.COLOR_BGR2GRAY); // change to single color

    Mat canny = new Mat();
    Imgproc.Canny(dst, canny, 100, 200); // make image a canny image that is only edges; 2,4
    // lower threshold values find more edges
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat(); // holds nested contour information
    Imgproc.findContours(
        canny,
        contours,
        hierarchy,
        Imgproc.RETR_LIST,
        Imgproc.CHAIN_APPROX_SIMPLE); // Imgproc.RETR_LIST, TREE

    // draw contour image (debug output written to disk)
    Mat mask = Mat.zeros(dst.size(), dst.type());
    Imgproc.drawContours(
        mask, contours, -1, new Scalar(255, 255, 255), 1, 8, hierarchy, 2, new Point());
    Highgui.imwrite("contours.jpg", mask);

    // FIX: use a parameterized list instead of a raw ArrayList.
    ArrayList<Integer> occupied = new ArrayList<Integer>();
    for (int i = 0; i < playSquares; i++) {
      if (board[i] != 0) occupied.add(i);
    }

    for (int i = 0; i < contours.size(); i++) // assuming only contours are checker pieces
    {
      // determine if it should be a king
      // use Rect r = Imgproc.boundingRect then find height of it by r.height

      // Get bounding rect of contour
      Rect bound = Imgproc.boundingRect(contours.get(i));

      if (bound.height > in.rows() / 8) {
        // NOTE(review): king promotion is still disabled pending a reliable
        // contour-to-square mapping.
        // board[(int) occupied.get(0)]++; // make it a king
        // occupied.remove(0);
      }
    }

    // FIX: release native buffers for the per-call temporaries.
    dst.release();
    canny.release();
    hierarchy.release();
    mask.release();

    // An alternative per-square ROI approach (walking each playable square,
    // running Canny per ROI and checking the tallest contour) previously lived
    // here as a large commented-out block; removed as dead code — see version
    // control history if it needs to be revived.
  }
  /**
   * Per-frame tracking pipeline: binarizes the frame (Otsu), cleans it with a
   * 9x9 erosion/dilation, restricts attention to a 200px horizontal band across
   * the vertical center, finds the largest contour there, and either waits for
   * the target to be horizontally centered (before training starts) or steers
   * toward it via uHandler messages. Returns the annotated RGBA frame.
   */
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    mRgba = inputFrame.rgba();
    // convert to Gray scale
    Imgproc.cvtColor(mRgba, img_gray, Imgproc.COLOR_BGR2GRAY);
    // make it binary; with THRESH_OTSU the threshold value 100 is ignored
    Imgproc.threshold(img_gray, erd, 100, 255, Imgproc.THRESH_OTSU);
    // FIX: getStructuringElement expects a *shape* constant (MORPH_RECT /
    // MORPH_CROSS / MORPH_ELLIPSE), not a morphology *operation* code.
    // MORPH_ERODE (0) coincidentally equals MORPH_RECT, but MORPH_DILATE (1)
    // equals MORPH_CROSS and silently produced a cross-shaped kernel.
    // remove pixel noise by "erode" with structuring element of 9X9
    Mat erodeKernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(9, 9));
    Imgproc.erode(erd, tgt, erodeKernel);
    // apply "dilation" to enlarge object
    Mat dilateKernel = Imgproc.getStructuringElement(Imgproc.MORPH_RECT, new Size(9, 9));
    Imgproc.dilate(tgt, erd, dilateKernel);
    // FIX: release per-frame kernel Mats to avoid native-memory buildup.
    erodeKernel.release();
    dilateKernel.release();
    // take a 200px-tall window across the vertical middle of the frame
    Size s = erd.size();
    int W = (int) s.width;
    int H = (int) s.height;
    Rect r = new Rect(0, H / 2 - 100, W, 200);
    Mat mask = new Mat(s, CvType.CV_8UC1, new Scalar(0, 0, 0));
    rectangle(mask, r.tl(), r.br(), new Scalar(255, 255, 255), -1);
    erd.copyTo(window, mask);
    mask.release();

    // find the contours
    Imgproc.findContours(window, contours, dest, 0, 2);
    // find largest contour
    int maxContour = -1;
    double area = 0;
    for (int i = 0; i < contours.size(); i++) {
      double contArea = Imgproc.contourArea(contours.get(i));
      if (contArea > area) {
        area = contArea;
        maxContour = i;
      }
    }
    // form bounding rectangle for largest contour
    Rect rect = null;
    if (maxContour > -1) rect = Imgproc.boundingRect(contours.get(maxContour));

    // Training not yet started: wait until the target is horizontally centered.
    // FIX: this was a 'while' loop that always returned on its first iteration;
    // an 'if' states the actual control flow.
    if (!train) {
      if (rect != null) {
        Imgproc.rectangle(mRgba, rect.tl(), rect.br(), new Scalar(255, 255, 255), 5);
        // Within +/- 20px of the horizontal center: training can begin.
        if ((rect.x + rect.width / 2) > W / 2 - 20 && (rect.x + rect.width / 2) < W / 2 + 20) {
          runOnUiThread(
              new Runnable() {
                @Override
                public void run() {
                  Toast.makeText(getApplicationContext(), "OK", Toast.LENGTH_SHORT).show();
                }
              });
          train = true;
        }
      }
      if (contours != null) contours.clear();
      return mRgba;
    }

    // Training in progress: steer toward the target with a +/- thr dead zone.
    if (rect != null) {
      Imgproc.rectangle(mRgba, rect.tl(), rect.br(), new Scalar(255, 255, 255), 5);
      // direction of movement
      int thr = 100;
      if ((rect.x + rect.width / 2) < (W / 2 - thr)) {
        // target is left of the dead zone
        uHandler.obtainMessage(MainActivity.LEFT).sendToTarget();
      } else {
        if ((rect.x + rect.width / 2) > (W / 2 + thr)) {
          uHandler.obtainMessage(MainActivity.RIGHT).sendToTarget();
        } else {
          uHandler.obtainMessage(MainActivity.FORWARD).sendToTarget();
        }
      }
    } else {
      // stop moving
      uHandler.obtainMessage(MainActivity.STOP).sendToTarget();
    }
    if (contours != null) contours.clear();
    return mRgba;
  }
  /**
   * Dispatches the current frame through the image-manipulation demo selected
   * by ImageManipulationsActivity.viewMode (RGBA pass-through, histogram,
   * Canny, Sobel, sepia, zoom, pixelize, posterize) and returns the processed
   * mRgba frame. Each mode lazily (re)creates the cached auxiliary submats via
   * CreateAuxiliaryMats when the frame size changes.
   */
  public Mat onCameraFrame(Mat inputFrame) {
    inputFrame.copyTo(mRgba);

    switch (ImageManipulationsActivity.viewMode) {
      case ImageManipulationsActivity.VIEW_MODE_RGBA:
        break;

      case ImageManipulationsActivity.VIEW_MODE_HIST:
        if ((mSizeRgba == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        // Bar thickness and left offset chosen so five histogram groups
        // (R, G, B, Value, Hue) fit across the frame width.
        int thikness = (int) (mSizeRgba.width / (mHistSizeNum + 10) / 5);
        if (thikness > 5) thikness = 5;
        int offset = (int) ((mSizeRgba.width - (5 * mHistSizeNum + 4 * 10) * thikness) / 2);
        // RGB
        for (int c = 0; c < 3; c++) {
          Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
          Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
          mHist.get(0, 0, mBuff);
          // Draw one vertical bar per histogram bin, rising from the bottom.
          for (int h = 0; h < mHistSizeNum; h++) {
            mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness;
            mP1.y = mSizeRgba.height - 1;
            mP2.y = mP1.y - 2 - (int) mBuff[h];
            Core.line(mRgba, mP1, mP2, mColorsRGB[c], thikness);
          }
        }
        // Value and Hue
        Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
        // Value
        Imgproc.calcHist(
            Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges);
        Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
        mHist.get(0, 0, mBuff);
        for (int h = 0; h < mHistSizeNum; h++) {
          mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
          mP1.y = mSizeRgba.height - 1;
          mP2.y = mP1.y - 2 - (int) mBuff[h];
          Core.line(mRgba, mP1, mP2, mWhilte, thikness);
        }
        // Hue
        Imgproc.calcHist(
            Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges);
        Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
        mHist.get(0, 0, mBuff);
        for (int h = 0; h < mHistSizeNum; h++) {
          mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
          mP1.y = mSizeRgba.height - 1;
          mP2.y = mP1.y - 2 - (int) mBuff[h];
          Core.line(mRgba, mP1, mP2, mColorsHue[h], thikness);
        }
        break;

      case ImageManipulationsActivity.VIEW_MODE_CANNY:
        // Edge-detect only the centered inner window, in place.
        if ((mRgbaInnerWindow == null)
            || (mGrayInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
        break;

      case ImageManipulationsActivity.VIEW_MODE_SOBEL:
        Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);

        if ((mRgbaInnerWindow == null)
            || (mGrayInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();

        // Sobel gradient of the gray inner window, amplified for visibility.
        Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
        Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
        break;

      case ImageManipulationsActivity.VIEW_MODE_SEPIA:
        Core.transform(mRgba, mRgba, mSepiaKernel);
        break;

      case ImageManipulationsActivity.VIEW_MODE_ZOOM:
        // Magnify the centered zoom window into the top-left corner region.
        if ((mZoomCorner == null)
            || (mZoomWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());

        Size wsize = mZoomWindow.size();
        Core.rectangle(
            mZoomWindow,
            new Point(1, 1),
            new Point(wsize.width - 2, wsize.height - 2),
            new Scalar(255, 0, 0, 255),
            2);
        break;

      case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
        // Downscale by 10x then upscale with nearest-neighbor for a mosaic.
        if ((mRgbaInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.resize(mRgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
        Imgproc.resize(
            mIntermediateMat, mRgbaInnerWindow, mSizeRgbaInner, 0., 0., Imgproc.INTER_NEAREST);
        break;

      case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
        if ((mRgbaInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        /*
        Imgproc.cvtColor(mRgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
        */

        // Black out Canny edges, then quantize colors to multiples of 16.
        Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
        mRgbaInnerWindow.setTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
        Core.convertScaleAbs(mRgbaInnerWindow, mIntermediateMat, 1. / 16, 0);
        Core.convertScaleAbs(mIntermediateMat, mRgbaInnerWindow, 16, 0);
        break;
    }

    return mRgba;
  }
  /**
   * Tracks head-pose feature corners from the previous frame into the current
   * one using pyramidal Lucas-Kanade optical flow, then compacts
   * hp.corners/hp.modelPoints down to the points whose flow status is good.
   *
   * @param mRgba current RGBA frame
   * @param hp working head-pose state (previous frame, corners, model points)
   * @param facesArray detected face rectangles (not used by the active code)
   */
  void hpTrack(Mat mRgba, WorkingHeadPose hp, Rect[] facesArray) {
    MatOfByte status = new MatOfByte();
    // Mat prev=new Mat(mRgba.width(),mRgba.height(),CvType.CV_8UC1);
    // Mat curr=new Mat(mRgba.width(),mRgba.height(),CvType.CV_8UC1);
    Mat prev = new Mat(mRgba.size(), CvType.CV_8UC1);
    Mat curr = new Mat(mRgba.size(), CvType.CV_8UC1);
    MatOfPoint2f tmpCorners = new MatOfPoint2f();
    MatOfFloat err = new MatOfFloat();
    int i, j, count;
    // LK termination criteria: 20 iterations or epsilon 0.3.
    TermCriteria optical_flow_termination_criteria =
        new TermCriteria(); // =(TermCriteria.MAX_ITER|TermCriteria.EPS,20,.3);//  (
                            // CV_TERMCRIT_ITER | CV_TERMCRIT_EPS, 20, .3 );
    optical_flow_termination_criteria.epsilon = .3;
    optical_flow_termination_criteria.maxCount = 20;
    // Grayscale versions of the previous and current frames for optical flow.
    Imgproc.cvtColor(hp.previousFrame, prev, Imgproc.COLOR_RGBA2GRAY, 0);
    Imgproc.cvtColor(mRgba, curr, Imgproc.COLOR_RGBA2GRAY, 0);
    // Video.calcOpticalFlowPyrLK(prev, curr,hp.previousCorners,tmpCorners,status,new MatOfFloat(),
    // new Size(10,10), 3,optical_flow_termination_criteria, 0, 1);
    // http://stackoverflow.com/questions/12561292/android-using-calcopticalflowpyrlk-with-matofpoint2f
    if (hp.previousCorners.total() > 0)
      // Video.calcOpticalFlowPyrLK(prev, curr,hp.previousCorners,tmpCorners,status,new
      // MatOfFloat(), new Size(11,11),5,optical_flow_termination_criteria, 0, 1);
      Video.calcOpticalFlowPyrLK(prev, curr, hp.previousCorners, tmpCorners, status, err);

    /*Point[] pointp = hp.previousCorners.toArray();
    Point[] pointn = tmpCorners.toArray();
    for (Point px : pointp)
    { Core.circle(mRgba, px, 15, new Scalar(255,0,0)); }
    for (Point py : pointn)
    { Core.circle(mRgba, py, 5, new Scalar(0,0,255)); }
    */

    // Point a,b;
    // a=new Point();
    // b=new Point();
    /*if(TempFace.length>0)
    	{

    	for( i = 0; i < hp.tempcorners.total(); i++ )
    	{
    		//center.x=facearray1[0].x + hp.corners.toList().get(i).x;
    		//center.y=facearray1[0].y + hp.corners.toList().get(i).y;
    		Point a = new Point(TempFace[0].tl().x+hp.tempcorners.toList().get(i).x,TempFace[0].tl().x+hp.tempcorners.toList().get(i).y);
    		Point b= new Point(TempFace[0].tl().y+tmpCorners.toList().get(i).x,TempFace[0].tl().y+tmpCorners.toList().get(i).y);
    		Core.line(mRgba, a,b, new Scalar(255,0,0),2);
    	}
    	}
    */
    // Count the corners whose flow status byte reports a successful track.
    count = 0;
    for (i = 0; i < hp.cornerCount; i += 1) {
      if (i == hp.corners.total()) break;
      if (status.toList().get(i) == 1) {
        count += 1;
      }
    }

    // Replace headPose->corners and headPose->modelPoints
    // imran
    // http://stackoverflow.com/questions/11273588/how-to-convert-matofpoint-to-matofpoint2f-in-opencv-java-api

    MatOfPoint2f corners2f = new MatOfPoint2f();
    hp.corners.convertTo(corners2f, CvType.CV_32FC2);
    List<Point> plist = new ArrayList<Point>();

    // Compact successfully-tracked points to the front (index j).
    // NOTE(review): toList() on an OpenCV Mat typically returns a *copy*, so
    // the set(j, ...) calls below may never write back into corners2f or
    // hp.modelPoints — verify; if so the convertTo below round-trips the
    // original, un-compacted corners.
    for (i = 0, j = 0; i < hp.cornerCount; i += 1) {
      if (i == hp.corners.total()) break;
      if (status.toList().get(i) == 1) {

        // plist.add(tmpCorners.toList().get(i));
        corners2f
            .toList()
            .set(j, tmpCorners.toList().get(i)); // .get(i)=tmpCorners.toList().get(i);
        hp.modelPoints.toList().set(j, hp.modelPoints.toList().get(i));
        // =hp.modelPoints.toList().get(i);

        // hp.corners[j] = tmpCorners[i];
        // headPose->modelPoints[j] = headPose->modelPoints[i];
        j += 1;
      }
    }
    // corners2f.fromList(plist);
    corners2f.convertTo(hp.corners, CvType.CV_32S);
    Log.i("CournerCount", "Reassigning" + count);
    hp.cornerCount = count;
  }
Example #17
  /**
   * Picks the contour of imgGray with the highest density of matched ORB
   * keypoints (matches inside the contour's bounding box divided by the
   * contour's point count).
   *
   * @return the best-matching contour; the first contour if no contour
   *     accumulated any matches; or null when there are no matches or no
   *     contours at all
   */
  public MatOfPoint getContourOfBestMatch() {

    if (matches == null) return null;

    // Suppress dark pixels before contour extraction.
    // NOTE(review): 'threshold' is never released in this method — native
    // memory is reclaimed only by the finalizer.
    Mat threshold = new Mat(imgGray.size(), imgGray.type());
    Imgproc.threshold(imgGray, threshold, 70, 255, Imgproc.THRESH_TOZERO);

    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(
        threshold, contours, new Mat(), Imgproc.RETR_CCOMP, Imgproc.CHAIN_APPROX_NONE);
    // HashMap<Integer,MatOfPoint> coordinates = computeCoord(contours,)

    if (contours.size() == 0) return null;
    List<DMatch> matchList = matches.toList();
    List<KeyPoint> keyPointList = keyPointImg.toList();

    HashMap<Integer, Double> contourDensityMap = new HashMap<Integer, Double>();

    Log.i("getContourBestMatch::", "contour size::" + contours.size());

    for (int idx = 0; idx < contours.size(); idx++) {
      MatOfPoint2f ctr2f = new MatOfPoint2f(contours.get(idx).toArray());
      // double contourarea = Imgproc.contourArea(contours.get(idx));

      // "Area" here is the number of contour points (rows), not enclosed area.
      double contourarea = contours.get(idx).rows();
      // Skip contours with fewer than 50 points.
      if (contourarea < 50) continue;

      Rect r = Imgproc.boundingRect(contours.get(idx));

      double count = 0;
      // Log.i("contour area","contour area is::"+contourarea);
      // Count matched keypoints falling inside this contour's bounding box.
      for (DMatch match : matchList) {

        Point q = keyPointList.get(match.queryIdx).pt;
        if (q.x >= r.x && q.x <= (r.x + r.width) && q.y >= r.y && q.y <= (r.y + r.height)) count++;

        //
        // if(Imgproc.pointPolygonTest(ctr2f,keyPointList.get(match.queryIdx).pt,true)>0){
        //                    if(null ==contourDensityMap.get(idx))
        //                        contourDensityMap.put(idx,1.0);
        //
        //                    else{
        //                        contourDensityMap.put(idx,((Double)contourDensityMap.get(idx))+1);
        //                    }
        //
        //                }

      }
      //            if(contourDensityMap.containsKey(idx)) {
      //
      // Log.i("contourPoint","idx::"+idx+"count::"+contourDensityMap.get(idx)+"contour
      // area::"+contourarea);
      //                contourDensityMap.put(idx, contourDensityMap.get(idx) / contourarea);
      //            }
      if (count != 0) {
        contourDensityMap.put(idx, count / contourarea);
      }
    }

    Log.i("MarkerTracker", "contour density size::" + contourDensityMap.size());

    // Select the contour with the highest keypoint density.
    Map.Entry<Integer, Double> maxEntry = null;

    for (Map.Entry<Integer, Double> entry : contourDensityMap.entrySet()) {
      Log.i("contourDensityMap", "Entry value::" + entry.getValue());
      if (maxEntry == null || entry.getValue().compareTo(maxEntry.getValue()) > 0) {
        maxEntry = entry;
      }
    }
    Log.i("maxEntry::", "" + (maxEntry == null ? null : maxEntry.getKey()));
    // return contours;
    return contours.get(maxEntry != null ? maxEntry.getKey() : 0);
  }
예제 #18
0
  /**
   * Recursively walks the contour hierarchy starting at {@code index} and collects
   * near-circular contours: each accepted candidate is cropped out of {@code image}
   * and appended to {@code signList}, with its bounding box appended to {@code boxList}.
   *
   * <p>(Comments translated to English from the original Slovak; the original text was
   * partially mojibake-garbled.)
   *
   * @param contours all contours found in the image (indexed by {@code hierarchy})
   * @param index hierarchy index at which to start scanning sibling contours
   */
  public static void Circle(List<MatOfPoint> contours, int index) {
    int i = index;
    Mat mRGBA = new Mat();
    Utils.bitmapToMat(image, mRGBA);
    // Loop with the condition at the end: iterate over sibling contours in the hierarchy.
    do {
      // Hierarchy row layout in OpenCV: [next, previous, firstChild, parent].
      int buff[] = new int[4];
      hierarchy.get(0, i, buff);

      // Get contour from list
      Mat contour = contours.get(i);

      // Remember this contour's id (used when drawing the mask below).
      int id = i;

      // Advance to the next sibling contour id.
      i = buff[0];

      // Only consider contours large enough to be worth processing.
      if (Imgproc.contourArea(contour) > 500) {

        List<Point> points = new ArrayList<Point>();

        // Total number of points in the contour.
        int num = (int) contour.total();

        // Scratch array twice the contour length (x and y per point).
        int temp[] = new int[num * 2];

        // Load the contour data into the scratch array.
        contour.get(0, 0, temp);

        // Convert to List<Point> / MatOfPoint2f for use with fitEllipse.
        for (int j = 0; j < num * 2; j = j + 2) {
          points.add(new Point(temp[j], temp[j + 1]));
        }
        MatOfPoint2f specialPointMtx = new MatOfPoint2f(points.toArray(new Point[0]));

        // bound holds the best-fit ellipse for the contour.
        RotatedRect bound = Imgproc.fitEllipse(specialPointMtx);

        // Estimate "pi" as area / (semi-axis product); close to 3.14 for a circle.
        double pi =
            Imgproc.contourArea(contour) / ((bound.size.height / 2) * (bound.size.width / 2));

        // Roundness tolerance check.
        if (Math.abs(pi - 3.14) > 0.03) {
          int k = buff[2];
          // Not round enough: descend into the related contour, if any.
          // NOTE(review): buff[2] is the first *child* in OpenCV's hierarchy layout,
          // though the original comment called it the parent — confirm intended traversal.
          if (k != -1) {
            Circle(contours, k);
          }
          continue;
        }

        // Convert MatOfPoint2f back to MatOfPoint for boundingRect — the only difference
        // is 32-bit float vs 32-bit int storage.
        MatOfPoint NewMtx = new MatOfPoint(specialPointMtx.toArray());
        // Bounding box of the candidate.
        Rect box = Imgproc.boundingRect(NewMtx);
        // Reload the source image.
        Mat mat_for_count = new Mat();
        Utils.bitmapToMat(image, mat_for_count);
        // Clone the boxed region — a good candidate for recognition.
        Mat candidate = ((mat_for_count).submat(box)).clone();
        // Mask filled with binary black.
        Mat mask = new Mat(box.size(), candidate.type(), new Scalar(0, 0, 0));
        // Draw the contour in white onto the black mask (shifted into box coordinates).
        Imgproc.drawContours(
            mask,
            contours,
            id,
            new Scalar(255, 255, 255),
            -1,
            8,
            hierarchy,
            0,
            new Point(-box.x, -box.y));
        // White canvas for the candidate.
        Mat roi = new Mat(candidate.size(), candidate.type(), new Scalar(255, 255, 255));
        // Copy only the masked candidate pixels onto the canvas.
        candidate.copyTo(roi, mask);

        double longAxis;
        double shortAxis;
        // Extract the two ellipse semi-axes.
        if (bound.size.height < bound.size.width) {
          shortAxis = bound.size.height / 2;
          longAxis = bound.size.width / 2;
        } else {
          shortAxis = bound.size.width / 2;
          longAxis = bound.size.height / 2;
        }

        // Reject overly oval ellipses; otherwise record the candidate.
        if ((longAxis / shortAxis) < 2.0) {
          signList.add(roi);
          boxList.add(box);
        }
      }
      // Continue while there is another sibling candidate.
    } while (i != -1);
  }
예제 #19
0
  /**
   * Per-frame camera callback: detects faces in the grayscale frame, draws face/eye regions
   * onto the RGBA frame, learns eye templates over the first five frames and afterwards
   * template-matches the eyes, and mirrors the eye areas into the zoom windows.
   *
   * @param inputFrame current camera frame (provides rgba() and gray() views)
   * @return the annotated RGBA frame
   */
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    // Lazily derive the minimum detectable face size (pixels) from the relative setting.
    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
    }

    // Allocate the helper submats (zoom windows etc.) once the frame size is known.
    if (mZoomWindow == null || mZoomWindow2 == null) CreateAuxiliaryMats();

    MatOfRect faces = new MatOfRect();

    if (mJavaDetector != null)
      mJavaDetector.detectMultiScale(
          mGray,
          faces,
          1.1,
          2,
          2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
          new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
          new Size());

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++) {
      Imgproc.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);
      // Face-centre coordinates: (x + width/2, y + height/2).
      xCenter = (facesArray[i].x + facesArray[i].width + facesArray[i].x) / 2;
      yCenter = (facesArray[i].y + facesArray[i].y + facesArray[i].height) / 2;
      Point center = new Point(xCenter, yCenter);

      Imgproc.circle(mRgba, center, 10, new Scalar(255, 0, 0, 255), 3);

      // Label the face centre with its coordinates.
      Imgproc.putText(
          mRgba,
          "[" + center.x + "," + center.y + "]",
          new Point(center.x + 20, center.y + 20),
          Core.FONT_HERSHEY_SIMPLEX,
          0.7,
          new Scalar(255, 255, 255, 255));

      Rect r = facesArray[i];
      // compute the eye area
      Rect eyearea =
          new Rect(
              r.x + r.width / 8,
              (int) (r.y + (r.height / 4.5)),
              r.width - 2 * r.width / 8,
              (int) (r.height / 3.0));
      // split it into right and left halves
      Rect eyearea_right =
          new Rect(
              r.x + r.width / 16,
              (int) (r.y + (r.height / 4.5)),
              (r.width - 2 * r.width / 16) / 2,
              (int) (r.height / 3.0));
      Rect eyearea_left =
          new Rect(
              r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
              (int) (r.y + (r.height / 4.5)),
              (r.width - 2 * r.width / 16) / 2,
              (int) (r.height / 3.0));
      // draw the area - mGray is working grayscale mat, if you want to
      // see area in rgb preview, change mGray to mRgba
      Imgproc.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(255, 0, 0, 255), 2);
      Imgproc.rectangle(
          mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(255, 0, 0, 255), 2);

      if (learn_frames < 5) {
        // Still learning: grab fresh eye templates from the first few frames.
        teplateR = get_template(mJavaDetectorEye, eyearea_right, 24);
        teplateL = get_template(mJavaDetectorEye, eyearea_left, 24);
        learn_frames++;
      } else {
        // Learning finished, use the new templates for template
        // matching
        match_eye(eyearea_right, teplateR, method);
        match_eye(eyearea_left, teplateL, method);
      }

      // cut eye areas and put them to zoom windows
      Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
      Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
    }

    return mRgba;
  }
예제 #20
0
  /**
   * Extracts and classifies colour bands for each Resistor. Each ColourBand object is instantiated
   * and linked to their parent Resistor object.
   *
   * @param resistorList A list of Resistor objects from which to extract the colour bands
   * @param paintDebugInfo If true, the extracted colour band ROIs are displayed on the GUI
   */
  private void extractColourBandsAndClassify(List<Resistor> resistorList, boolean paintDebugInfo) {
    if (resistorList.size() > 0) {
      for (int r = 0; r < resistorList.size(); r++) {
        Mat resImg = resistorList.get(r).resistorMat;

        Mat imgHSV = new Mat();
        Mat satImg = new Mat();
        Mat hueImg = new Mat();

        // convert to HSV
        // NOTE(review): COLOR_BGR2HSV assumes resistorMat is BGR — confirm upstream format.
        Imgproc.cvtColor(resImg, imgHSV, Imgproc.COLOR_BGR2HSV);
        ArrayList<Mat> channels = new ArrayList<Mat>();
        Core.split(imgHSV, channels);
        // extract channels
        satImg = channels.get(1); // saturation
        hueImg = channels.get(0); // hue

        // threshold saturation channel
        Mat threshedROISatBands = new Mat(); // ~130 sat thresh val
        Imgproc.threshold(satImg, threshedROISatBands, SAT_BAND_THRESH, 255, Imgproc.THRESH_BINARY);

        // threshold hue channel
        Mat threshedROIHueBands = new Mat(); // ~50 hue thresh val
        Imgproc.threshold(hueImg, threshedROIHueBands, HUE_BAND_THRESH, 255, Imgproc.THRESH_BINARY);

        // combine the thresholded binary images: a band may trigger either channel
        Mat bandROI = new Mat();
        Core.bitwise_or(threshedROIHueBands, threshedROISatBands, bandROI);

        // find contours in binary ROI image
        ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        Mat hierarchy = new Mat();
        Imgproc.findContours(
            bandROI,
            contours,
            hierarchy,
            Imgproc.RETR_EXTERNAL,
            Imgproc.CHAIN_APPROX_SIMPLE,
            new Point(0, 0));

        // remove any remaining noise by only keeping contours whose area > threshold
        // (index is decremented after removal so no element is skipped)
        for (int i = 0; i < contours.size(); i++) {
          double area = Imgproc.contourArea(contours.get(i));
          if (area < MIN_BAND_AREA) {
            contours.remove(i);
            i--;
          }
        }

        // create a ColourBand object for each detected band
        // storing its center, the contour and the bandROI
        for (int i = 0; i < contours.size(); i++) {
          MatOfPoint contour = contours.get(i);

          // extract this colour band and store in a Mat
          Rect boundingRect = Imgproc.boundingRect(contour);
          Mat mask = Mat.zeros(bandROI.size(), CvType.CV_8U);
          Imgproc.drawContours(mask, contours, i, new Scalar(255), Core.FILLED);
          Mat imageROI = new Mat();
          resImg.copyTo(imageROI, mask);
          Mat colourBandROI = new Mat(imageROI, boundingRect);

          // instantiate new ColourBand object
          ColourBand cb = new ColourBand(findCenter(contour), contour, colourBandROI);

          // cluster the band colour
          cb.clusterBandColour(BAND_COLOUR_K_MEANS);

          // classify using the Lab colourspace as feature vector
          Mat sampleMat =
              new Mat(1, 3, CvType.CV_32FC1); // create a Mat containing the clustered band colour
          sampleMat.put(0, 0, cb.clusteredColourLAB[0]);
          sampleMat.put(0, 1, cb.clusteredColourLAB[1]);
          sampleMat.put(0, 2, cb.clusteredColourLAB[2]);
          Mat classifiedValue = new Mat(1, 1, CvType.CV_32FC1);
          Mat neighborResponses = new Mat(); // dont actually use this
          Mat dists = new Mat(); // dont actually use this
          // classify against the 3 nearest trained neighbours
          knn.find_nearest(sampleMat, 3, classifiedValue, neighborResponses, dists);

          // cast classified value into Colour enum and store
          cb.classifiedColour = ColourEnumVals[(int) classifiedValue.get(0, 0)[0]];
          // add the band to the parent resistor
          resistorList.get(r).bands.add(cb);
        }

        // paint the extracted band ROIs
        if (paintDebugInfo) {
          Mat finalBandROIMask = Mat.zeros(bandROI.size(), CvType.CV_8U);
          for (int i = 0; i < contours.size(); i++) {
            Scalar color = new Scalar(255, 255, 255);
            Imgproc.drawContours(
                finalBandROIMask, contours, i, color, -1, 4, hierarchy, 0, new Point());
          }
          Mat colourROI = new Mat();
          resImg.copyTo(colourROI, finalBandROIMask);
          paintResistorSubRegion(colourROI, r);
        }
      }
    }
  }
예제 #21
0
 /**
  * Stores the working image and caches a grayscale copy for feature detection.
  *
  * @param image source image (BGRA) to track against
  */
 public void setImage(Mat image) {
   this.image = image;
   // ORB detection runs on single-channel input, so build the gray version up front.
   Mat gray = new Mat(image.size(), image.type());
   Imgproc.cvtColor(image, gray, Imgproc.COLOR_BGRA2GRAY);
   imgGray = gray;
 }
예제 #22
0
 /**
  * Stores the template image and caches a grayscale copy for feature detection.
  *
  * @param template template image (BGRA) to match against
  */
 public void setTemplate(Mat template) {
   this.template = template;
   // Matching operates on single-channel data, so build the gray version up front.
   Mat gray = new Mat(template.size(), template.type());
   Imgproc.cvtColor(template, gray, Imgproc.COLOR_BGRA2GRAY);
   templGray = gray;
 }
예제 #23
0
 /**
  * Returns the size of the wrapped matrix.
  *
  * @return the matrix size, or an empty {@code Size} when this wrapper is not valid
  */
 public Size getSize() {
   return valid ? mat.size() : new Size();
 }
  /**
   * Detects (keyframe) or tracks head-pose corners for the current frame.
   *
   * <p>When no pose is active, corners are detected inside the first face rectangle, mapped
   * onto a cylindrical 3D head model ({@code modelPoints}) and translated into full-image
   * coordinates. When a pose is active, the previous corners are handed to {@code hpTrack}.
   * The frame is always copied into {@code hp.previousFrame} for the next tracking step.
   *
   * @param mRgba current RGBA frame
   * @param mGray current grayscale frame
   * @param hp working head-pose state (corners, status, previous frame)
   * @param facesArray faces detected in this frame
   */
  public void hpFind(Mat mRgba, Mat mGray, WorkingHeadPose hp, Rect[] facesArray) {

    int i;
    Log.i("HeadPose", "hpFind:Total Faces Found" + facesArray.length);
    if (hp.hpstatus == HeadPoseStatus.NONE) {
      if (facesArray.length < 1) return;
      TempFace = facesArray.clone();
      hp.cornerCount = hp.maxCorners;

      // Crop the first detected face from a grayscale copy and detect corners inside it.
      Rect roi =
          new Rect(
              (int) facesArray[0].tl().x,
              (int) (facesArray[0].tl().y),
              facesArray[0].width,
              (int) (facesArray[0].height));
      Mat cropped = mGray.clone().submat(roi); // mGray itself stays untouched
      hpFindCorners(cropped, hp);

      // Need at least 4 corners to estimate a pose.
      if (hp.cornerCount < 4) return;

      if (hp.corners.total() > 0) // guard against an empty corner Mat
      {
        Log.i("hpFind+", "hp.corners.total()" + hp.corners.total());
        Log.i("hpFind+", "hp.cornerCount" + hp.cornerCount);

        // Map face-relative corner positions onto a simple cylindrical head model.
        List<Point> cornerList = hp.corners.toList();
        Vector<Point3> points = new Vector<Point3>();
        for (i = 0; i < hp.cornerCount && i < cornerList.size(); i++) {
          double nx = cornerList.get(i).x / facesArray[0].width;
          double ny = cornerList.get(i).y / facesArray[0].height;
          points.add(new Point3(nx - 0.5, -ny + 0.5, 0.5 * Math.sin(PI * nx)));
        }
        modelPoints.fromList(points);
      }

      // Translate corners from face-crop coordinates to full-image coordinates.
      // BUGFIX: the original mutated the Point copies returned by toList() — OpenCV's
      // converters copy the Mat data, so the translation was silently discarded — and it
      // offset y by br().y instead of the crop origin tl().y. The translated points are
      // now written back with fromList().
      List<Point> translated = hp.corners.toList();
      if (!translated.isEmpty()) {
        for (i = 0; i < hp.cornerCount && i < translated.size(); i++) {
          translated.get(i).x += facesArray[0].tl().x;
          translated.get(i).y += facesArray[0].tl().y;
        }
        hp.corners.fromList(translated);
      }

      hp.corners.copyTo(hp.tempcorners);
      // This frame becomes the keyframe.
      hp.hpstatus = HeadPoseStatus.KEYFRAME;
    } else {
      // NOTE(review): the refresh only fires when more than one face is present;
      // confirm whether `facesArray.length >= 1` was intended.
      if (facesArray.length > 1) TempFace = facesArray.clone();

      // Keep a float copy of the previous corners for tracking.
      MatOfPoint2f corners2f = new MatOfPoint2f();
      hp.corners.convertTo(corners2f, CvType.CV_32FC2);
      hp.previousCorners = corners2f;
      corners2f.convertTo(hp.corners, CvType.CV_32S);

      hpTrack(mRgba, hp, facesArray);

      // Too few corners survived tracking: fall back to detection next frame.
      if (hp.cornerCount < 4) {
        hp.hpstatus = HeadPoseStatus.NONE;
        return;
      }

      hp.hpstatus = HeadPoseStatus.TRACKING;
    }

    // Lazily allocate the previous-frame buffer, then remember this frame for tracking.
    if (hp.previousFrame == null) {
      hp.previousFrame = new Mat(mRgba.size(), CvType.CV_8UC4);
    }
    mRgba.copyTo(hp.previousFrame);
  }
예제 #25
0
  /**
   * Matches the current image against every stored template and returns the best one.
   *
   * <p>ORB keypoints/descriptors are computed once for the current image, then matched
   * (brute-force Hamming) against each template's cached descriptors. Templates whose two
   * best matches are too similar (ambiguous) are skipped; among the rest, the template with
   * the most matches wins. Side effects: {@code matches}, {@code keyPointImg} and
   * {@code descImg} are updated to reflect the winning template's match.
   *
   * @param templates candidate templates keyed by name
   * @return the best-matching template, or {@code null} when none passes the ratio test
   */
  public Template performMatches(Map<String, Template> templates) {

    // create feature detectors and feature extractors
    FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor orbExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // detect keypoints and compute ORB descriptors for the current image once
    MatOfKeyPoint keyPointImgT = new MatOfKeyPoint();
    orbDetector.detect(imgGray, keyPointImgT);

    Mat descImgT = new Mat(image.size(), image.type());
    orbExtractor.compute(imgGray, keyPointImgT, descImgT);

    Template best = null;
    matches = null;

    for (Map.Entry<String, Template> entry : templates.entrySet()) {

      Template t = entry.getValue();
      Mat descTempl = t.getDescTempl();

      // BUGFIX: the original cache check also required getTemplGray() != null, but the gray
      // image was never stored on the Template, so keypoints/descriptors were recomputed on
      // every call. Only the keypoints/descriptors are needed (and actually cached).
      if (descTempl == null || t.getKeyPointTempl() == null) {
        // read image from stored data and build the template's descriptors once
        Mat templ = readImgFromFile(t.getTemplName());

        Mat tGray = new Mat(templ.size(), templ.type());
        Imgproc.cvtColor(templ, tGray, Imgproc.COLOR_BGRA2GRAY);

        MatOfKeyPoint keyPointTempl = new MatOfKeyPoint();
        orbDetector.detect(tGray, keyPointTempl);

        descTempl = new Mat(templ.size(), templ.type());
        orbExtractor.compute(tGray, keyPointTempl, descTempl);

        t.setKeyPointTempl(keyPointTempl);
        t.setDescTempl(descTempl);
      }

      MatOfDMatch matchWithT = new MatOfDMatch();
      DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
      matcher.match(descImgT, descTempl, matchWithT);
      List<DMatch> matchList = matchWithT.toList();

      // Reject templates whose two best matches are too close together (ambiguous match).
      float ratio = -1;
      if (matchList.size() > 0) ratio = findMinTwoRatio(matchList);

      if (ratio > 0.8 || ratio == -1) continue;
      Log.i("match", "ratio::" + ratio);

      // Keep the template with the largest number of matches seen so far.
      if (matches == null || (matchWithT.size().height > matches.size().height)) {
        matches = matchWithT;
        keyPointImg = keyPointImgT;
        descImg = descImgT;
        best = t;
      }
    }

    return best;
  }
예제 #26
0
  /**
   * Thresholds {@code inputImg} and composites the thresholded foreground over white.
   *
   * <p>NOTE(review): large parts of this pipeline are commented out. As written, {@code hsvImg}
   * is allocated but never filled from the frame (the cvtColor call is commented out), and
   * {@code thresholdImg} is still empty when blur() first runs — verify this method against a
   * known-good revision before relying on it.
   *
   * @param inputImg source image
   * @param minValue lower threshold value
   * @param maxValue upper threshold value
   * @return Mat the composited foreground image
   */
  public static Mat thresholding(Mat inputImg, Integer minValue, Integer maxValue) {

    Mat frame = inputImg;
    // brightness (translated from Ukrainian)
    // frame.convertTo(frame , -1, 10d * 33 / 100, 0);
    // Imgproc.medianBlur(frame,frame, 17);

    // Core.bitwise_not(frame,frame );

    // Mat frame = new Mat(image.rows(), image.cols(), image.type());

    // frame.convertTo(frame, -1, 10d * 20 / 100, 0);

    Mat hsvImg = new Mat();
    List<Mat> hsvPlanes = new ArrayList<>();
    Mat thresholdImg = new Mat();

    // NOTE(review): thresh_type is set but unused — the threshold call that used it is
    // commented out below.
    int thresh_type = Imgproc.THRESH_BINARY_INV;

    // if (this.inverse.isSelected())
    // thresh_type = Imgproc.THRESH_BINARY;

    // threshold the image with the average hue value
    // System.out.println("size " +frame.size());
    hsvImg.create(frame.size(), CvType.CV_8U);
    // Imgproc.cvtColor(frame, hsvImg, Imgproc.COLOR_BGR2HSV);
    Core.split(hsvImg, hsvPlanes);

    // get the average hue value of the image
    // double threshValue = PreProcessingOperation.getHistAverage(hsvImg, hsvPlanes.get(0));
    // System.out.println(threshValue);
    /*
    if(threshValue > 40){
        maxValue = 160;
    }else{
        maxValue = 40;
    }*/

    //        Imgproc.threshold(hsvPlanes.get(1), thresholdImg, minValue , maxValue , thresh_type);

    Imgproc.blur(thresholdImg, thresholdImg, new Size(27, 27));

    // dilate to fill gaps, erode to smooth edges
    Imgproc.dilate(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 1);
    Imgproc.erode(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 1);

    Imgproc.threshold(thresholdImg, thresholdImg, minValue, maxValue, Imgproc.THRESH_BINARY);

    // create the new image: white canvas, then the inverted threshold mask is written
    // into it before the masked copy of the frame
    Mat foreground = new Mat(frame.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    Core.bitwise_not(thresholdImg, foreground);

    frame.copyTo(foreground, thresholdImg);

    ///////////////////////////////////////////////////////////////////////////////////////
    ///
    ////

    return foreground;
    /*Mat hsvImg = new Mat();
    List<Mat> hsvPlanes = new ArrayList<>();
    Mat thresholdImg = new Mat();
    int thresh_type = Imgproc.THRESH_BINARY_INV;
    // threshold the image with the average hue value
    hsvImg.create(inputImg.size(), CvType.CV_8U);
    Imgproc.cvtColor(inputImg, hsvImg, Imgproc.COLOR_BGR2HSV);
    Core.split(hsvImg, hsvPlanes);
    // get the average hue value of the image
    double threshValue = PreProcessingOperation.getHistAverage(hsvImg, hsvPlanes.get(0));
    Imgproc.threshold(hsvPlanes.get(0), thresholdImg, minValue,
            maxValue, thresh_type);
    Imgproc.blur(thresholdImg, thresholdImg, new Size(3, 3));
    // dilate to fill gaps, erode to smooth edges
    Imgproc.dilate(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 3);
    Imgproc.erode(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 1);
    Imgproc.threshold(thresholdImg, thresholdImg, minValue,
            maxValue, Imgproc.THRESH_BINARY);
    // create the new image
    Mat foreground = new Mat(inputImg.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    inputImg.copyTo(foreground, thresholdImg);
    Core.bitwise_not(foreground,foreground);
    return foreground;*/
  }
예제 #27
0
  /**
   * Called on every new frame captured by the camera.
   *
   * <p>Behaviour depends on the current state:
   *
   * <ul>
   *   <li>If the door is not yet stable, we look for the "door stable" event so a photo of the
   *       fridge contents can be taken.
   *   <li>If the door is stable but not closed, a photo has already been taken and we wait for
   *       the door to close before returning to the initial state.
   * </ul>
   *
   * <p>(Comments translated to English from the original French.)
   *
   * @param inputFrame image captured by the camera
   * @return the current RGBA camera frame
   */
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat current = inputFrame.rgba();
    if (stable && !fermee) {
      // A photo has been taken.
      // Now look for the event: the video stream shows black frames (door closed).
      Scalar scalaireN = new Scalar(0x00, 0x00, 0x00, 0xFF);
      Mat noir = new Mat(current.size(), current.type(), scalaireN);
      // noir is an all-black matrix.
      // Compare against the black image; the result is stored in diffNoir.
      Mat diffNoir = new Mat(current.size(), current.type());
      Core.absdiff(current, noir, diffNoir);
      Double normeDiffNoir =
          new Double(Core.norm(diffNoir)); // norm of the difference matrix
      n.add(normeDiffNoir); // store the norm in a container
      compteur++; // counter of frames processed
      if (compteur > 11) {
        // Once enough frames have been captured, check whether the door is closed.
        fermee = true;
        int i = 0;
        while (fermee && i < 10) {
          // The door is closed if, over the last ten frames, the difference between a
          // black image and the current image never got too large.
          if (n.get(compteur - 1 - i) > 4500) {
            fermee =
                false; // difference too large: the door is considered open
          }
          i++;
        } // If it never got too large, the door really is closed.
        if (fermee) {
          // Reset the counter and the norm list so they can be reused for a new photo.
          compteur = 0;
          n.clear();
          finish(); // back to the main activity, which waits for the fridge to open
        }
      }
    } else if (!stable) {
      // No photo has been taken yet.
      // Look for the event: the image is stable.
      if (buffer == null) { // first frame received: create a buffer matrix that will hold
        // the previous image
        buffer = new Mat(current.size(), current.type());
        buffer = current.clone();
      } else { // at least the second frame received
        // Compare the previous image with the current one; result stored in diffBuffer.
        Mat diffBuffer = new Mat(current.size(), current.type());
        Core.absdiff(current, buffer, diffBuffer);
        Double normeDiffBuffer =
            new Double(Core.norm(diffBuffer)); // norm of the difference matrix
        n.add(normeDiffBuffer); // store the norm in a container
        compteur++; // counter of frames processed
        if (compteur > 11) {
          // Once enough frames have been captured, check whether the scene is stable.
          stable = true;
          int i = 0;
          while (stable && i < 10) {
            // The scene is stable if, over the last ten frames, the difference between
            // the current image and the stored one never got too large.
            if (n.get(compteur - 1 - i) > 4500) {
              stable = false;
            }
            i++;
          }
          if (stable) {
            Log.i(TAG, "Prise de la photo");
            // The image is stable; first make sure the door is not simply closed
            // (same black-frame test as the closed-door detection above).
            Scalar scalaireN = new Scalar(0x00, 0x00, 0x00, 0xFF);
            Mat noir = new Mat(current.size(), current.type(), scalaireN);
            Mat diffNoir = new Mat(current.size(), current.type());
            Core.absdiff(current, noir, diffNoir);
            Double normeDiffNoir = new Double(Core.norm(diffNoir));
            if (normeDiffNoir > 4500) {
              // The door is open: save the image before sending it.
              File pictureFileDir = getDir();
              SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy-HH.mm.ss");
              String date = dateFormat.format(new Date());
              String photoFile = "PictureCV_" + date + ".jpg"; // file name
              String filename = pictureFileDir.getPath() + File.separator + photoFile;
              // Colours must be converted before saving the image;
              // see cvtColor's documentation for why.
              Imgproc.cvtColor(current, current, Imgproc.COLOR_BGR2RGB);
              Highgui.imwrite(filename, current); // save
              Log.i(TAG, "Photo sauvegardée");
              // Reset the counter and the norm list so they can be reused for a new photo.
              compteur = 0;
              n.clear();

              /*
              // Image-recognition attempt: try to detect a banana on each new frame
              // captured by the phone.
                	Mat Grey = inputFrame.gray(); // frame captured by the camera
                	MatOfRect bananas = new MatOfRect();
                	Size minSize = new Size(30,20);
                	Size maxSize = new Size(150,100);
                	Log.i(TAG, "Tentative de détection de banane");
                	mCascadeClassifier.detectMultiScale(Grey, bananas, 1.1, 0, 10,minSize,maxSize);
                	if (bananas.rows()>0){
                		Log.i(TAG, "Nombre de bananes détectées : " + bananas.rows());
                	}
              envoiPhoto(filename, bananas.rows()); // send the photo with the recognition data
              // end of image recognition
              */

              envoiPhoto(filename); // send the photo without recognition data

            } else {
              // The door is closed.
              // Reset the counter and the norm list so they can be reused for a new photo.
              compteur = 0;
              n.clear();
              finish();
            }
          }
        }
        buffer = current.clone();
      }
    }
    return inputFrame.rgba();
  }