private static Bitmap canny(Bitmap image) {

    // convert the bitmap to a matrix (Utils.bitmapToMat fills it as an RGBA CV_8UC4 Mat)
    Mat mat1 = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC4);
    Utils.bitmapToMat(image, mat1);

    // temporary matrix for intermediate results
    Mat mat2 = new Mat(image.getHeight(), image.getWidth(), CvType.CV_8UC1);

    // convert the image to grayscale
    Imgproc.cvtColor(mat1, mat2, Imgproc.COLOR_RGBA2GRAY);

    // a light Gaussian blur suppresses noise and reduces false edge detections
    Imgproc.GaussianBlur(mat2, mat1, new Size(3, 3), 2, 2);

    // now apply the Canny detector
    int threshold1 = 25;             // manually tuned lower threshold
    int threshold2 = threshold1 * 3; // Canny's recommended upper:lower ratio of 3:1
    Imgproc.Canny(mat1, mat2, threshold1, threshold2);

    // convert the single-channel edge map back to a 4-channel image for display
    Imgproc.cvtColor(mat2, mat1, Imgproc.COLOR_GRAY2RGBA, 4);

    // convert the matrix back to an output bitmap
    Bitmap output = Bitmap.createBitmap(image.getWidth(), image.getHeight(), Bitmap.Config.RGB_565);
    Utils.matToBitmap(mat1, output);
    return output;
  }
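For context, a hypothetical call site inside an Activity (the resource id, the ImageView, and the OpenCV initialization are placeholders, not part of the original snippet):

// Hypothetical usage of canny(Bitmap); R.drawable.sample and imageView are placeholders.
if (OpenCVLoader.initDebug()) { // the OpenCV native library must be loaded first
    Bitmap input = BitmapFactory.decodeResource(getResources(), R.drawable.sample);
    Bitmap edges = canny(input);
    imageView.setImageBitmap(edges);
}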
Example #2
  /**
   * Locate rectangles in an image
   *
   * @param grayImage Grayscale image
   * @return Rectangle locations
   */
  public RectangleLocationResult locateRectangles(Mat grayImage) {
    Mat gray = grayImage.clone();

    // Filter out some noise
    Filter.downsample(gray, 2);
    Filter.upsample(gray, 2);

    Mat cacheHierarchy = new Mat();
    Mat grayTemp = new Mat();
    List<Rectangle> rectangles = new ArrayList<>();
    List<Contour> contours = new ArrayList<>();

    // Detect edges, then dilate the edge map so nearby edge fragments connect
    Imgproc.Canny(gray, grayTemp, 0, THRESHOLD_CANNY, APERTURE_CANNY, true);
    Filter.dilate(grayTemp, 2);

    List<MatOfPoint> contoursTemp = new ArrayList<>();
    // Find contours - the retrieval mode and approximation method control how much
    // contour data is retained and how aggressively it is compressed
    Imgproc.findContours(
        grayTemp, contoursTemp, cacheHierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    // For each contour, test whether it approximates a rectangle
    MatOfPoint2f approx = new MatOfPoint2f();
    for (MatOfPoint co : contoursTemp) {
      MatOfPoint2f matOfPoint2f = new MatOfPoint2f(co.toArray());
      Contour c = new Contour(co);

      // Attempt to fit the contour to the best polygon
      Imgproc.approxPolyDP(
          matOfPoint2f, approx, c.arcLength(true) * EPLISON_APPROX_TOLERANCE_FACTOR, true);

      Contour approxContour = new Contour(approx);

      // Make sure the polygon has exactly 4 points, encloses enough area, and is closed
      if (approx.toArray().length == 4
          && Math.abs(approxContour.area()) > 1000
          && approxContour.isClosed()) {

        // TODO the contours and rectangles lists are filled at different points, so their indices may not line up
        contours.add(approxContour);

        // Check each angle to be approximately 90 degrees
        double maxCosine = 0;
        for (int j = 2; j < 5; j++) {
          double cosine =
              Math.abs(
                  MathUtil.angle(
                      approx.toArray()[j % 4], approx.toArray()[j - 2], approx.toArray()[j - 1]));
          maxCosine = Math.max(maxCosine, cosine);
        }

        if (maxCosine < MAX_COSINE_VALUE) {
          // Convert the points to a rectangle instance
          rectangles.add(new Rectangle(approx.toArray()));
        }
      }
    }

    return new RectangleLocationResult(contours, rectangles);
  }
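The snippet leans on a MathUtil.angle helper that is not shown. A minimal sketch consistent with the corner-cosine test in OpenCV's classic squares.cpp sample (an assumption about what MathUtil actually provides) would be:

// Hypothetical MathUtil.angle: cosine of the angle at pt0 between vectors pt0->pt1 and pt0->pt2.
static double angle(Point pt1, Point pt2, Point pt0) {
    double dx1 = pt1.x - pt0.x;
    double dy1 = pt1.y - pt0.y;
    double dx2 = pt2.x - pt0.x;
    double dy2 = pt2.y - pt0.y;
    // small epsilon avoids division by zero for degenerate (zero-length) edges
    return (dx1 * dx2 + dy1 * dy2)
        / Math.sqrt((dx1 * dx1 + dy1 * dy1) * (dx2 * dx2 + dy2 * dy2) + 1e-10);
}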
Example #3
File: Square.java  Project: emre801/PokeEye
  private static Mat findLargestRectangle(Mat original_image) {
    Mat imgSource = original_image.clone();

    // convert the image to grayscale
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);

    // detect edges (produces an 8-bit single-channel edge map)
    Imgproc.Canny(imgSource, imgSource, 50, 50);

    // apply a Gaussian blur to join dotted edges into smoother lines
    Imgproc.GaussianBlur(imgSource, imgSource, new Size(5, 5), 5);

    // find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(
        imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxArea = -1;
    MatOfPoint temp_contour = contours.get(0); // start with the first contour; the loop below finds the largest quad
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    List<MatOfPoint> largest_contours = new ArrayList<MatOfPoint>();
    for (int idx = 0; idx < contours.size(); idx++) {
      temp_contour = contours.get(idx);
      double contourarea = Imgproc.contourArea(temp_contour);
      // compare this contour to the previous largest contour found
      if (contourarea > maxArea) {
        // check if this contour is a square
        MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
        int contourSize = (int) temp_contour.total();
        Imgproc.approxPolyDP(new_mat, approxCurve, contourSize * 0.05, true);
        if (approxCurve.total() == 4) {
          maxArea = contourarea;
          largest_contours.add(temp_contour);
        }
      }
    }
    MatOfPoint temp_largest = largest_contours.get(largest_contours.size() - 1);
    largest_contours = new ArrayList<MatOfPoint>();

    largest_contours.add(temp_largest);

    // Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BayerBG2RGB);
    Imgproc.drawContours(original_image, largest_contours, -1, new Scalar(0, 255, 0), 10);

    // Mat perspectiveTransform = new Mat(3, 3, CvType.CV_32FC1);
    // Imgproc.warpPerspective(original_image, imgSource,
    // perspectiveTransform, new Size(300,300));

    Highgui.imwrite(output, original_image);

    // create the new image here using the largest detected square

    // Toast.makeText(getApplicationContext(), "Largest Contour: ",
    // Toast.LENGTH_LONG).show();

    return imgSource;
  }
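The "create the new image here using the largest detected square" step is left open above. If a simple axis-aligned crop is enough (perspective correction is handled separately in other snippets), one hypothetical way to finish it, placed inside findLargestRectangle just before the return, is:

// Hypothetical completion: crop the source to the bounding box of the detected quad.
// The output file name is a placeholder.
Rect box = Imgproc.boundingRect(temp_largest);
Mat cropped = new Mat(original_image, box);
Highgui.imwrite("largest_square.jpg", cropped);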
Example #4
  /**
   * Detect edges with the Canny operator
   *
   * @param image Input color image
   * @param size Base threshold for the Canny detector (the second threshold is size / 3)
   * @return Mat containing the single-channel edge map
   */
  public static Mat cannyDetection(Mat image, int size) {

    Mat grayImage = new Mat();
    Mat detectedEdges = new Mat();
    // convert to grayscale
    Imgproc.cvtColor(image, grayImage, Imgproc.COLOR_BGR2GRAY);
    // reduce noise with a 3x3 kernel
    Imgproc.blur(grayImage, detectedEdges, new Size(3, 3));
    // Canny detector; OpenCV uses the smaller of the two thresholds for edge linking
    // and the larger for strong edges, so the effective ratio here is 3:1
    Imgproc.Canny(detectedEdges, detectedEdges, size, size / 3, 7, false);
    return detectedEdges;
  }
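A hypothetical call site for cannyDetection (file names are placeholders), using the Highgui I/O calls already seen elsewhere in these examples:

// Sketch only: read an image from disk, run the detector, and write the edge map back out.
public static void demoCannyDetection() {
    Mat frame = Highgui.imread("input.jpg");
    Mat edges = cannyDetection(frame, 90); // 90 and 30 become the two Canny thresholds
    Highgui.imwrite("input_edges.jpg", edges);
}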
Example #5
  /**
   * Locate ellipses within an image
   *
   * @param grayImage Grayscale image
   * @return Ellipse locations
   */
  public EllipseLocationResult locateEllipses(Mat grayImage) {
    Mat gray = grayImage.clone();

    Filter.downsample(gray, 2);
    Filter.upsample(gray, 2);

    Imgproc.Canny(gray, gray, 5, 75, 3, true);
    Filter.dilate(gray, 2);

    Mat cacheHierarchy = new Mat();

    List<MatOfPoint> contoursTemp = new ArrayList<>();
    // Find contours - the retrieval mode and approximation method control how much
    // contour data is retained and how aggressively it is compressed
    Imgproc.findContours(
        gray, contoursTemp, cacheHierarchy, Imgproc.RETR_TREE, Imgproc.CHAIN_APPROX_TC89_KCOS);

    // List contours
    List<Contour> contours = new ArrayList<>();
    for (MatOfPoint co : contoursTemp) {
      contours.add(new Contour(co));
    }

    // Find ellipses by fitting one to each sufficiently large contour
    List<Ellipse> ellipses = new ArrayList<>();
    for (MatOfPoint co : contoursTemp) {
      // fitEllipse needs at least 5 points; require 6 here to be safe
      if (co.toArray().length < 6) continue;
      // Copy MatOfPoint to MatOfPoint2f
      MatOfPoint2f matOfPoint2f = new MatOfPoint2f(co.toArray());
      // Fit an ellipse to the current contour
      Ellipse ellipse = new Ellipse(Imgproc.fitEllipse(matOfPoint2f));

      // Record the fitted ellipse
      ellipses.add(ellipse);
    }

    return new EllipseLocationResult(contours, ellipses);
  }
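Examples #2 and #5 both call Filter.downsample, Filter.upsample, and Filter.dilate, which are project helpers not shown here. A rough sketch of what they presumably wrap (the meaning of the numeric argument is an assumption: a scale factor for the resampling calls, an iteration count for dilate) is:

// Hypothetical sketch of the Filter helpers used above.
public final class Filter {
  // Shrink the image by the given factor to suppress fine-grained noise
  public static void downsample(Mat img, double factor) {
    Imgproc.resize(img, img, new Size(0, 0), 1.0 / factor, 1.0 / factor, Imgproc.INTER_AREA);
  }

  // Scale the image back up by the same factor
  public static void upsample(Mat img, double factor) {
    Imgproc.resize(img, img, new Size(0, 0), factor, factor, Imgproc.INTER_LINEAR);
  }

  // Dilate with a small elliptical kernel to close gaps in the edge map
  public static void dilate(Mat img, int iterations) {
    Mat kernel = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(3, 3));
    Imgproc.dilate(img, img, kernel, new Point(-1, -1), iterations);
  }
}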
Example #6
  public void processWithContours(Mat in, Mat out) {
    int playSquares = 32; // number of playable game board squares

    // keep track of starting row square
    int parity = 0; // 0 is even, 1 is odd, tied to row number
    int count = 0; // row square
    int rowNum = 0; // row number, starting at 0

    int vsegment = in.rows() / 8; // height of one square (8 playable rows)
    int hsegment = in.cols() / 10; // width of one square (8 playable columns + 2 capture columns)
    int hOffset = hsegment * 2; // horizontal offset to the playable board
    int vOffset = vsegment + 40; // vertical offset to the playable board

    // For angle of camera
    int dx = 80;
    int ddx = 0;
    hsegment -= 16;

    int dy = 20;
    vsegment -= 24;
    int ddy = 0;

    // Go through all playable squares
    for (int i = 0; i < playSquares; i++) {
      // change offset depending on the row
      if (parity == 0) // playable squares start on 2nd square from left
      {
        if (rowNum >= 5) dx -= 3;
        hOffset = hsegment * 2 + dx;
      } else // playable squares start on immediate left
      {
        if (rowNum >= 5) dx -= 3;
        hOffset = hsegment + dx;
      }

      if (rowNum == 0) ddy = 5;
      if (rowNum == 4) if (count == 6) ddx = 10;
      if (rowNum == 5) {
        if (count == 0) ddx = -6;
        else if (count == 2) ddx = 6;
        else if (count == 4) ddx = 12;
        else if (count == 6) ddx = 20;
      }
      if (rowNum == 6) {
        if (count == 0) ddx = 0;
        else if (count == 2) ddx = 16;
        else if (count == 4) ddx = 32;
        else if (count == 6) ddx = 40;
      }
      if (rowNum == 7) {
        if (count == 0) ddx = 6;
        else if (count == 2) ddx = 24;
        else if (count == 4) ddx = 40;
        else ddx = 52;
      }

      // find where roi should be
      // System.out.println("" + vOffset);
      Point p1 =
          new Point(
              hOffset + count * hsegment + ddx + 5,
              vOffset + rowNum * vsegment - dy - 5 - ddy); // top left point of rectangle (x,y)
      Point p2 =
          new Point(
              hOffset + (count + 1) * hsegment + ddx - 5,
              vOffset
                  + (rowNum + 1) * vsegment
                  - dy
                  - 5
                  - ddy); // bottom right point of rectangle (x,y)

      // create rectangle that is board square
      Rect bound = new Rect(p1, p2);

      Mat roi;
      char color;
      if (i == 0) {
        // frame only includes rectangle
        roi = new Mat(in, bound);

        // get the color
        color = identifyColor(roi);

        // copy input image to output image
        in.copyTo(out);
      } else {
        // frame only includes rectangle
        roi = new Mat(out, bound);

        // get the color
        color = identifyColor(roi);
      }

      Imgproc.cvtColor(roi, roi, Imgproc.COLOR_BGR2GRAY); // convert the square to grayscale

      Mat canny = new Mat();
      Imgproc.Canny(roi, canny, 20, 40); // edge map of the square; lower thresholds find more edges
      List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
      Mat hierarchy = new Mat(); // holds nested contour information
      Imgproc.findContours(
          canny,
          contours,
          hierarchy,
          Imgproc.RETR_LIST,
          Imgproc.CHAIN_APPROX_SIMPLE); // Imgproc.RETR_LIST, TREE

      System.out.println(++test + "\t" + contours.size());

      if (contours.size() > 3) // the square holds a piece if it contains enough edge contours
      {
        switch (color) {
          case COLOR_BLUE:
            // Imgproc.rectangle(out, p1, p2, new Scalar(255, 0, 0), 2);
            Core.rectangle(out, p1, p2, new Scalar(255, 0, 0), 2);
            board[i] = CheckersBoard.BLACK; // end user's piece
            break;
          case COLOR_ORANGE:
            // Imgproc.rectangle(out, p1, p2, new Scalar(0, 128, 255), 2);
            Core.rectangle(out, p1, p2, new Scalar(0, 128, 255), 2);
            board[i] = CheckersBoard.WHITE; // system's piece
            break;
          case COLOR_WHITE:
            // Imgproc.rectangle(out, p1, p2, new Scalar(255, 255, 255), 2);
            Core.rectangle(out, p1, p2, new Scalar(255, 255, 255), 2);
            board[i] = CheckersBoard.EMPTY;
            break;
          case COLOR_BLACK: // this is black
            // Imgproc.rectangle(out, p1, p2, new Scalar(0, 0, 0), 2);
            Core.rectangle(
                out,
                p1,
                p2,
                new Scalar(0, 0, 0),
                2); // maybe add 8, 0 as line type and fractional bits
            board[i] = CheckersBoard.EMPTY;
            break;
        }
      }

      System.out.println("in color switch " + board[i]);
      count += 2;
      if (count == 8) {
        parity = (parity + 1) % 2; // toggle between even and odd rows
        count = 0;
        rowNum++;
        hsegment += 2;
        dx -= 10;
        dy += 10;
        vsegment += 3;
        ddy = 0;
      }
    }
  }
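The commented-out Imgproc.rectangle calls above hint at an OpenCV version difference: drawing functions such as rectangle live in Core in the 2.4.x Java bindings and moved to Imgproc in 3.x. A small wrapper (purely illustrative, not part of the original project) keeps the board-marking code version-agnostic:

// Illustrative wrapper: pick the drawing call that matches the OpenCV version in use.
private static void drawSquare(Mat img, Point p1, Point p2, Scalar color, int thickness) {
    Core.rectangle(img, p1, p2, color, thickness);      // OpenCV 2.4.x Java bindings
    // Imgproc.rectangle(img, p1, p2, color, thickness); // OpenCV 3.x+ equivalent
}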
Example #7
  /**
   * Determines which pieces are kings
   *
   * @param in Mat image of board
   */
  public void determineKings(Mat in) {
    int playSquares = 32;

    Mat dst = new Mat(in.rows(), in.cols(), in.type());
    in.copyTo(dst);

    Imgproc.cvtColor(dst, dst, Imgproc.COLOR_BGR2GRAY); // convert the board image to grayscale

    Mat canny = new Mat();
    Imgproc.Canny(dst, canny, 100, 200); // edge map of the board; lower thresholds find more edges
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat(); // holds nested contour information
    Imgproc.findContours(
        canny,
        contours,
        hierarchy,
        Imgproc.RETR_LIST,
        Imgproc.CHAIN_APPROX_SIMPLE); // Imgproc.RETR_LIST, TREE

    // draw contour image
    Mat mask = new Mat();
    mask = Mat.zeros(dst.size(), dst.type());
    Imgproc.drawContours(
        mask, contours, -1, new Scalar(255, 255, 255), 1, 8, hierarchy, 2, new Point());
    Highgui.imwrite("contours.jpg", mask);

    ArrayList<Integer> occupied = new ArrayList<Integer>();
    for (int i = 0; i < playSquares; i++) {
      if (board[i] != 0) occupied.add(i);
    }

    for (int i = 0; i < contours.size(); i++) // assuming every contour corresponds to a checker piece
    {
      // determine if it should be a king
      // use Rect r = Imgproc.boundingRect then find height of it by r.height

      // Get bounding rect of contour
      Rect bound = Imgproc.boundingRect(contours.get(i));

      if (bound.height > in.rows() / 8) {
        // board[(int) occupied.get(0)]++; // make it a king
        // occupied.remove(0);
      }
    }

    // or apply to each region of interest

    /*
    // keep track of starting row square
    int parity = 0; // 0 is even, 1 is odd, tied to row number
    int count = 0; // row square
    int rowNum = 0; // row number, starting at 0

    int vsegment = in.rows() / 8; // only accounts 8 playable
    int hsegment = in.cols() / 12; // 8 playable, 2 capture, 2 extra
    int offset = hsegment * 2; // offset for playable board

    // For angle of camera
    int dx = 48;
    hsegment -= 8;


    // Go through all playable squares
    for (int i = 0; i < playSquares; i++)
    {
    	// change offset depending on the row
    	if (parity == 0) // playable squares start on immediate left
    		offset = hsegment * 3 + dx;
    	else // playable squares start on 2nd square from left
    		offset = hsegment * 2 + dx;

    	// find where roi should be
    	Point p1 = new Point(offset + count * hsegment, rowNum * vsegment); // top left point of rectangle (x,y)
    	Point p2 = new Point(offset + (count + 1) * hsegment, (rowNum + 1) * vsegment); // bottom right point of rectangle (x,y)

    	// create rectangle that is board square
    	Rect bound = new Rect(p1, p2);

    	// frame only includes rectangle
    	Mat roi = new Mat(in, bound);

           Imgproc.cvtColor(roi, roi, Imgproc.COLOR_BGR2GRAY); // change to single color

           Mat canny = new Mat();
           Imgproc.Canny(roi, canny, 2, 4); // make image a canny image that is only edges; 2,4
           // lower threshold values find more edges
           List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
           Mat hierarchy = new Mat(); // holds nested contour information
           Imgproc.findContours(canny, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); // Imgproc.RETR_LIST, TREE

           // Get bounding rect of contour
              Rect rect = Imgproc.boundingRect(contours.get(0));

              if (rect.height > in.rows() / 8)
    	{
    		board[i]++; // make it a king
    	}

    	count += 2;
    	if (count == 8)
    	{
    		parity = ++parity % 2; // change odd or even
    		count = 0;
    		rowNum++;
    		hsegment += 1;
    		dx -= 6;
    	}
    }*/
  }
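The loop above leaves the kinging step commented out. A sketch of one way to finish that intent, under the snippet's own (unresolved) assumption that tall contours can be paired with occupied squares in order:

// Sketch only: promote a piece to king when its contour is taller than one board row.
private void promoteKings(List<MatOfPoint> contours, List<Integer> occupied, Mat in) {
    for (MatOfPoint contour : contours) {
        if (occupied.isEmpty()) break;
        Rect bound = Imgproc.boundingRect(contour);
        if (bound.height > in.rows() / 8) {
            board[occupied.get(0)]++; // promote the piece on that square to a king
            occupied.remove(0);
        }
    }
}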
Example #8
  @Override
  protected Bitmap processFrame(VideoCapture capture) {
    Time[] measureTime = new Time[9];
    String[] compDescStrings = {
      "Total processFrame",
      "Grab a new frame",
      "MatToBitmap",
      "Publish cameraInfo",
      "Create ImageMsg",
      "Compress image",
      "Transfer to Stream",
      "Image.SetData",
      "Publish Image",
      "Total econds per frame"
    };
    String[] rawDescStrings = {
      "Total processFrame",
      "Grab a new frame",
      "MatToBitmap",
      "Publish cameraInfo",
      "Create ImageMsg",
      "Pixel to buffer",
      "Transfer to Stream",
      "Image.SetData",
      "Publish Image",
      "Total seconds per frame"
    };

    measureTime[0] = connectedNode.getCurrentTime();

    switch (MainActivity.viewMode) {
      case MainActivity.VIEW_MODE_GRAY:
        //	            capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
        capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_GREY_FRAME);
        //	            Imgproc.cvtColor(mGray, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
        break;
      case MainActivity.VIEW_MODE_RGBA:
        capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
        //            Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3, 2, new
        // Scalar(255, 0, 0, 255), 3);
        break;
      case MainActivity.VIEW_MODE_CANNY:
        capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
        Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
        Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
        break;
    }
    Time currentTime = connectedNode.getCurrentTime();

    measureTime[1] = connectedNode.getCurrentTime();

    if (bmp == null) bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_NONE
        && bb == null) {
      Log.i(TAG, "Buffer 1");
      bb = ByteBuffer.allocate(bmp.getRowBytes() * bmp.getHeight());
      Log.i(TAG, "Buffer 2");
      bb.clear();
      Log.i(TAG, "Buffer 3");
    }
    try {
      Utils.matToBitmap(mRgba, bmp);
      measureTime[2] = connectedNode.getCurrentTime();

      cameraInfo = cameraInfoPublisher.newMessage();
      cameraInfo.getHeader().setFrameId("camera");
      cameraInfo.getHeader().setStamp(currentTime);
      cameraInfo.setWidth(640);
      cameraInfo.setHeight(480);
      cameraInfoPublisher.publish(cameraInfo);
      measureTime[3] = connectedNode.getCurrentTime();

      if (MainActivity.imageCompression >= MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG) {
        // Compressed image

        sensor_msgs.CompressedImage image = imagePublisher.newMessage();
        if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
          image.setFormat("png");
        else if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_JPEG)
          image.setFormat("jpeg");
        image.getHeader().setStamp(currentTime);
        image.getHeader().setFrameId("camera");
        measureTime[4] = connectedNode.getCurrentTime();

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
          bmp.compress(Bitmap.CompressFormat.PNG, 100, baos);
        else if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_JPEG)
          bmp.compress(Bitmap.CompressFormat.JPEG, MainActivity.imageCompressionQuality, baos);
        measureTime[5] = connectedNode.getCurrentTime();

        stream.buffer().writeBytes(baos.toByteArray());
        measureTime[6] = connectedNode.getCurrentTime();

        image.setData(stream.buffer().copy());
        measureTime[7] = connectedNode.getCurrentTime();

        stream.buffer().clear();
        imagePublisher.publish(image);
        measureTime[8] = connectedNode.getCurrentTime();
      } else {
        // Raw image

        Log.i(TAG, "Raw image 1");
        sensor_msgs.Image rawImage = rawImagePublisher.newMessage();
        rawImage.getHeader().setStamp(currentTime);
        rawImage.getHeader().setFrameId("camera");
        rawImage.setEncoding("rgba8");
        rawImage.setWidth(bmp.getWidth());
        rawImage.setHeight(bmp.getHeight());
        rawImage.setStep(bmp.getRowBytes()); // bytes per row (4 bytes per pixel for rgba8)
        measureTime[4] = connectedNode.getCurrentTime();

        Log.i(TAG, "Raw image 2");

        bmp.copyPixelsToBuffer(bb);
        measureTime[5] = connectedNode.getCurrentTime();

        Log.i(TAG, "Raw image 3");

        stream.buffer().writeBytes(bb.array());
        bb.clear();
        measureTime[6] = connectedNode.getCurrentTime();

        Log.i(TAG, "Raw image 4");

        rawImage.setData(stream.buffer().copy());
        stream.buffer().clear();
        measureTime[7] = connectedNode.getCurrentTime();

        Log.i(TAG, "Raw image 5");

        rawImagePublisher.publish(rawImage);
        measureTime[8] = connectedNode.getCurrentTime();
        Log.i(TAG, "Raw image 6");
      }

      newTime = connectedNode.getCurrentTime();
      stats[9][counter] = (newTime.subtract(oldTime)).nsecs / 1000000.0;
      oldTime = newTime;

      for (int i = 1; i < 9; i++) {
        stats[i][counter] = (measureTime[i].subtract(measureTime[i - 1])).nsecs / 1000000.0;
      }

      stats[0][counter] = measureTime[8].subtract(measureTime[0]).nsecs / 1000000.0;

      counter++;
      if (counter == numSamples) {
        double[] sts = new double[10];
        Arrays.fill(sts, 0.0);

        for (int i = 0; i < 10; i++) {
          for (int j = 0; j < numSamples; j++) sts[i] += stats[i][j];

          sts[i] /= (double) numSamples;

          if (MainActivity.imageCompression >= MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
            Log.i(TAG, String.format("Mean time for %s:\t\t%4.2fms", compDescStrings[i], sts[i]));
          else Log.i(TAG, String.format("Mean time for %s:\t\t%4.2fms", rawDescStrings[i], sts[i]));
        }
        Log.i(TAG, "\n\n");
        counter = 0;
      }

      return bmp;
    } catch (Exception e) {
      Log.e(TAG, "Frame conversion and publishing throws an exception: " + e.getMessage());
      bmp.recycle();
      return null;
    }
  }
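The method reads several fields that are declared elsewhere in the node class. A minimal set of declarations consistent with how they are used above (the numSamples value and initializations are assumptions; the publishers and stream are omitted because their exact types are not shown):

// Assumed field declarations matching the usage above; concrete values are guesses.
private static final int numSamples = 50;                     // averaging window for the timing stats
private final double[][] stats = new double[10][numSamples];  // 10 measured stages per frame
private int counter = 0;                                      // index of the current sample
private Time oldTime, newTime;                                // frame-to-frame timestamps
private Bitmap bmp;                                           // reused conversion bitmap
private ByteBuffer bb;                                        // reused pixel buffer for raw transport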
Example #9
  public void run() {
    ArrayList<Geometry.Quad> squares;

    Mat image = new Mat();
    Utils.bitmapToMat(source, image);

    Mat bwimage = new Mat();
    cvtColor(image, bwimage, COLOR_RGB2GRAY);

    Mat blurred = new Mat();
    medianBlur(image, blurred, 9);

    int width = blurred.width();
    int height = blurred.height();

    // gray0 holds one color plane at a time; note that Mat takes (rows, cols, type)
    Mat gray0 = new Mat(height, width, blurred.type());
    blurred.copyTo(gray0);

    squares = new ArrayList<Geometry.Quad>();

    // find squares in every color plane of the image
    for (int c = 0; c < 3; c++) {
      Core.mixChannels(
          Arrays.asList(blurred), Arrays.asList(new Mat[] {gray0}), new MatOfInt(c, 0));

      // try several threshold levels
      int thresholdLevel = 8;
      for (int l = 0; l < thresholdLevel; l++) {
        // use canny instead of 0 threshold level
        // canny helps catch squares with gradient shading
        Mat gray = new Mat();

        if (l == 0) {
          Canny(gray0, gray, 10.0, 20.0, 3, false);
          Mat kernel = new Mat(11, 11, CvType.CV_8UC1, new Scalar(1));
          dilate(gray, gray, kernel);
        } else {
          Mat thresh = new Mat(gray0.rows(), gray0.cols(), gray0.type());
          threshold(gray0, thresh, ((double) l) / thresholdLevel * 255, 128, THRESH_BINARY_INV);
          cvtColor(thresh, gray, COLOR_BGR2GRAY);
        }

        // find contours and store them in a list
        List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
        findContours(gray, contours, new Mat(), RETR_LIST, CHAIN_APPROX_SIMPLE);

        // test contours
        for (MatOfPoint contour : contours) {
          // approximate contour with accuracy proportional to the contour perimeter
          MatOfPoint2f thisContour = new MatOfPoint2f(contour.toArray());
          double arclength = 0.02 * arcLength(thisContour, true);
          MatOfPoint2f approx = new MatOfPoint2f();
          approxPolyDP(thisContour, approx, arclength, true);

          double area = contourArea(approx);
          boolean isConvex = isContourConvex(new MatOfPoint(approx.toArray()));

          if (approx.rows() == 4 && Math.abs(area) > SQUARE_SIZE && isConvex) {
            double maxCosine = 0;

            Point[] approxArray = approx.toArray();
            for (int j = 2; j < 5; j++) {
              double cosine =
                  Math.abs(angle(approxArray[j % 4], approxArray[j - 2], approxArray[j - 1]));
              maxCosine = Math.max(maxCosine, cosine);
            }

            if (maxCosine > THRESHOLD_COS) {
              squares.add(new Geometry.Quad(approxArray));
              Log.d(TAG, "area = " + area);
            }
          }
        }
      }
    }

    result = new Bundle();
    result.putParcelableArrayList("squares", squares);
    Log.d(TAG, "result created");

    finish();
  }
Example #10
  public Mat onCameraFrame(Mat inputFrame) {
    inputFrame.copyTo(mRgba);

    switch (ImageManipulationsActivity.viewMode) {
      case ImageManipulationsActivity.VIEW_MODE_RGBA:
        break;

      case ImageManipulationsActivity.VIEW_MODE_HIST:
        if ((mSizeRgba == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        int thikness = (int) (mSizeRgba.width / (mHistSizeNum + 10) / 5);
        if (thikness > 5) thikness = 5;
        int offset = (int) ((mSizeRgba.width - (5 * mHistSizeNum + 4 * 10) * thikness) / 2);
        // RGB
        for (int c = 0; c < 3; c++) {
          Imgproc.calcHist(Arrays.asList(mRgba), mChannels[c], mMat0, mHist, mHistSize, mRanges);
          Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
          mHist.get(0, 0, mBuff);
          for (int h = 0; h < mHistSizeNum; h++) {
            mP1.x = mP2.x = offset + (c * (mHistSizeNum + 10) + h) * thikness;
            mP1.y = mSizeRgba.height - 1;
            mP2.y = mP1.y - 2 - (int) mBuff[h];
            Core.line(mRgba, mP1, mP2, mColorsRGB[c], thikness);
          }
        }
        // Value and Hue
        Imgproc.cvtColor(mRgba, mIntermediateMat, Imgproc.COLOR_RGB2HSV_FULL);
        // Value
        Imgproc.calcHist(
            Arrays.asList(mIntermediateMat), mChannels[2], mMat0, mHist, mHistSize, mRanges);
        Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
        mHist.get(0, 0, mBuff);
        for (int h = 0; h < mHistSizeNum; h++) {
          mP1.x = mP2.x = offset + (3 * (mHistSizeNum + 10) + h) * thikness;
          mP1.y = mSizeRgba.height - 1;
          mP2.y = mP1.y - 2 - (int) mBuff[h];
          Core.line(mRgba, mP1, mP2, mWhilte, thikness);
        }
        // Hue
        Imgproc.calcHist(
            Arrays.asList(mIntermediateMat), mChannels[0], mMat0, mHist, mHistSize, mRanges);
        Core.normalize(mHist, mHist, mSizeRgba.height / 2, 0, Core.NORM_INF);
        mHist.get(0, 0, mBuff);
        for (int h = 0; h < mHistSizeNum; h++) {
          mP1.x = mP2.x = offset + (4 * (mHistSizeNum + 10) + h) * thikness;
          mP1.y = mSizeRgba.height - 1;
          mP2.y = mP1.y - 2 - (int) mBuff[h];
          Core.line(mRgba, mP1, mP2, mColorsHue[h], thikness);
        }
        break;

      case ImageManipulationsActivity.VIEW_MODE_CANNY:
        if ((mRgbaInnerWindow == null)
            || (mGrayInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
        break;

      case ImageManipulationsActivity.VIEW_MODE_SOBEL:
        Imgproc.cvtColor(mRgba, mGray, Imgproc.COLOR_RGBA2GRAY);

        if ((mRgbaInnerWindow == null)
            || (mGrayInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();

        Imgproc.Sobel(mGrayInnerWindow, mIntermediateMat, CvType.CV_8U, 1, 1);
        Core.convertScaleAbs(mIntermediateMat, mIntermediateMat, 10, 0);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_GRAY2BGRA, 4);
        break;

      case ImageManipulationsActivity.VIEW_MODE_SEPIA:
        Core.transform(mRgba, mRgba, mSepiaKernel);
        break;

      case ImageManipulationsActivity.VIEW_MODE_ZOOM:
        if ((mZoomCorner == null)
            || (mZoomWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.resize(mZoomWindow, mZoomCorner, mZoomCorner.size());

        Size wsize = mZoomWindow.size();
        Core.rectangle(
            mZoomWindow,
            new Point(1, 1),
            new Point(wsize.width - 2, wsize.height - 2),
            new Scalar(255, 0, 0, 255),
            2);
        break;

      case ImageManipulationsActivity.VIEW_MODE_PIXELIZE:
        if ((mRgbaInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        Imgproc.resize(mRgbaInnerWindow, mIntermediateMat, mSize0, 0.1, 0.1, Imgproc.INTER_NEAREST);
        Imgproc.resize(
            mIntermediateMat, mRgbaInnerWindow, mSizeRgbaInner, 0., 0., Imgproc.INTER_NEAREST);
        break;

      case ImageManipulationsActivity.VIEW_MODE_POSTERIZE:
        if ((mRgbaInnerWindow == null)
            || (mRgba.cols() != mSizeRgba.width)
            || (mRgba.height() != mSizeRgba.height)) CreateAuxiliaryMats();
        /*
        Imgproc.cvtColor(mRgbaInnerWindow, mIntermediateMat, Imgproc.COLOR_RGBA2RGB);
        Imgproc.pyrMeanShiftFiltering(mIntermediateMat, mIntermediateMat, 5, 50);
        Imgproc.cvtColor(mIntermediateMat, mRgbaInnerWindow, Imgproc.COLOR_RGB2RGBA);
        */

        Imgproc.Canny(mRgbaInnerWindow, mIntermediateMat, 80, 90);
        mRgbaInnerWindow.setTo(new Scalar(0, 0, 0, 255), mIntermediateMat);
        Core.convertScaleAbs(mRgbaInnerWindow, mIntermediateMat, 1. / 16, 0);
        Core.convertScaleAbs(mIntermediateMat, mRgbaInnerWindow, 16, 0);
        break;
    }

    return mRgba;
  }
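Several branches above draw into mRgbaInnerWindow yet the result still shows up in mRgba; that only works if the inner window is a submat view sharing mRgba's pixels. A rough sketch of what CreateAuxiliaryMats presumably sets up (the exact margins are assumptions):

// Sketch: the inner windows are submat views into mRgba/mGray, so drawing into them
// modifies the full frame in place. Margins of 1/8 on each side are an assumption.
private void CreateAuxiliaryMats() {
    mSizeRgba = mRgba.size();
    int rows = (int) mSizeRgba.height;
    int cols = (int) mSizeRgba.width;
    int left = cols / 8, top = rows / 8;
    int width = cols * 3 / 4, height = rows * 3 / 4;

    mRgbaInnerWindow = mRgba.submat(top, top + height, left, left + width);
    mGrayInnerWindow = mGray.submat(top, top + height, left, left + width);
    mSizeRgbaInner = mRgbaInnerWindow.size();
}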
Example #11
File: Square.java  Project: emre801/PokeEye
  public static void getSquare(Mat imgSource) {
    Mat sourceImage = imgSource.clone();
    // convert the image to grayscale
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);
    // detect edges (produces an 8-bit single-channel edge map)
    Imgproc.Canny(imgSource, imgSource, 50, 50);

    // apply a Gaussian blur to join dotted edges into smoother lines
    Imgproc.GaussianBlur(imgSource, imgSource, new org.opencv.core.Size(5, 5), 5);

    // find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(
        imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxArea = -1;
    int maxAreaIdx = -1;
    // Log.d("size",Integer.toString(contours.size()));
    MatOfPoint temp_contour = contours.get(0); // start with the first contour; the loop below finds the largest quad
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    MatOfPoint largest_contour = contours.get(0);
    // largest_contour.ge
    List<MatOfPoint> largest_contours = new ArrayList<MatOfPoint>();
    // Imgproc.drawContours(imgSource,contours, -1, new Scalar(0, 255, 0),
    // 1);

    for (int idx = 0; idx < contours.size(); idx++) {
      temp_contour = contours.get(idx);
      double contourarea = Imgproc.contourArea(temp_contour);
      // compare this contour to the previous largest contour found
      if (contourarea > maxArea) {
        // check if this contour is a square
        MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
        int contourSize = (int) temp_contour.total();
        MatOfPoint2f approxCurve_temp = new MatOfPoint2f();
        Imgproc.approxPolyDP(new_mat, approxCurve_temp, contourSize * 0.05, true);
        if (approxCurve_temp.total() == 4) {
          maxArea = contourarea;
          maxAreaIdx = idx;
          approxCurve = approxCurve_temp;
          largest_contour = temp_contour;
        }
      }
    }

    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BayerBG2RGB);

    double[] temp_double;
    temp_double = approxCurve.get(0, 0);
    Point p1 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p1,55,new Scalar(0,0,255));
    // Imgproc.warpAffine(sourceImage, dummy, rotImage,sourceImage.size());
    temp_double = approxCurve.get(1, 0);
    Point p2 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p2,150,new Scalar(255,255,255));
    temp_double = approxCurve.get(2, 0);
    Point p3 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p3,200,new Scalar(255,0,0));
    temp_double = approxCurve.get(3, 0);
    Point p4 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p4,100,new Scalar(0,0,255));
    List<Point> source = getCorners(p1, p2, p3, p4);
    for (Point p : source) {
      // System.out.println(p);
    }
    Mat startM = Converters.vector_Point2f_to_Mat(source);
    // Imgproc.cvtColor(sourceImage, sourceImage, Imgproc.COLOR_BGR2GRAY);
    Mat result = warp(sourceImage, startM, 5);
    // result = warp(result,result,1);
    // Imgproc.cvtColor(result, result, Imgproc.COLOR_BGR2GRAY);
    Highgui.imwrite(output, result);
    // System.out.println("Done");
    // return result;
  }
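getSquare hands the ordered corners to a warp helper that is not shown. A minimal sketch built on the standard getPerspectiveTransform / warpPerspective pair (the output size, the meaning of the third argument, and the corner ordering returned by getCorners are all assumptions):

// Hypothetical warp helper: map the four detected corners onto an upright square.
// The third argument is treated as a scale factor for the output size (an assumption),
// and getCorners is assumed to return corners clockwise starting at the top-left.
private static Mat warp(Mat inputMat, Mat startM, int factor) {
    int resultWidth = 100 * factor;
    int resultHeight = 100 * factor;
    Mat outputMat = new Mat(resultHeight, resultWidth, CvType.CV_8UC4);

    List<Point> dest = new ArrayList<Point>();
    dest.add(new Point(0, 0));
    dest.add(new Point(resultWidth, 0));
    dest.add(new Point(resultWidth, resultHeight));
    dest.add(new Point(0, resultHeight));
    Mat endM = Converters.vector_Point2f_to_Mat(dest);

    Mat perspectiveTransform = Imgproc.getPerspectiveTransform(startM, endM);
    Imgproc.warpPerspective(
        inputMat, outputMat, perspectiveTransform, new Size(resultWidth, resultHeight));
    return outputMat;
}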