Example #1
  public boolean hasChanges(Mat current) {
    int PIXEL_DIFF_THRESHOLD = 5;
    int IMAGE_DIFF_THRESHOLD = 5;
    Mat bg = new Mat();
    Mat cg = new Mat();
    Mat diff = new Mat();
    Mat tdiff = new Mat();

    Imgproc.cvtColor(base, bg, Imgproc.COLOR_BGR2GRAY);
    Imgproc.cvtColor(current, cg, Imgproc.COLOR_BGR2GRAY);
    Core.absdiff(bg, cg, diff);
    Imgproc.threshold(diff, tdiff, PIXEL_DIFF_THRESHOLD, 0.0, Imgproc.THRESH_TOZERO);
    if (Core.countNonZero(tdiff) <= IMAGE_DIFF_THRESHOLD) {
      return false;
    }

    Imgproc.threshold(diff, diff, PIXEL_DIFF_THRESHOLD, 255, Imgproc.THRESH_BINARY);
    Imgproc.dilate(diff, diff, new Mat());
    Mat se = Imgproc.getStructuringElement(Imgproc.MORPH_ELLIPSE, new Size(5, 5));
    Imgproc.morphologyEx(diff, diff, Imgproc.MORPH_CLOSE, se);

    List<MatOfPoint> points = new ArrayList<MatOfPoint>();
    Mat contours = new Mat(); // despite the name, this Mat receives the contour hierarchy
    Imgproc.findContours(diff, points, contours, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);
    int n = 0;
    for (Mat pm : points) {
      log(lvl, "(%d) %s", n++, pm);
      printMatI(pm);
    }
    log(lvl, "contours: %s", contours);
    printMatI(contours);
    return true;
  }
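For reference, a minimal standalone sketch of the same absdiff / threshold / countNonZero check, assuming the OpenCV 3.x+ Java binding (Imgcodecs) and two hypothetical frame files; the thresholds mirror the constants above.

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

public class DiffCheck {
  public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat a = Imgcodecs.imread("frame_a.png", Imgcodecs.IMREAD_GRAYSCALE);
    Mat b = Imgcodecs.imread("frame_b.png", Imgcodecs.IMREAD_GRAYSCALE);

    // Per-pixel absolute difference, zero out differences of 5 or less,
    // then count how many pixels remain.
    Mat diff = new Mat();
    Core.absdiff(a, b, diff);
    Mat tdiff = new Mat();
    Imgproc.threshold(diff, tdiff, 5, 0.0, Imgproc.THRESH_TOZERO);
    System.out.println("has changes: " + (Core.countNonZero(tdiff) > 5));
  }
}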
Example #2
  public static void draw_opticflow(Mat flow, Mat cflowmap, int step, Scalar color) {
    int i = 0, j = 0;
    for (int y = 0; y < cflowmap.rows(); y += step) {
      for (int x = 0; x < cflowmap.cols(); x += step) {

        // Sample the optical flow at the fixed point (50,50) rather than at (y,x).
        double[] fxy = Sample1View.flow.get(50, 50);

        // For testing: reading the change in flow values across frames at point (50,50).
        // Result: fxy does change value, but once 50 is added to it, it seems to become zero??
        i = (int) Math.round(fxy[0]);
        j = (int) (50.00 + fxy[1]);

        Core.putText(
            cflowmap,
            Double.toString(j),
            new Point(10, 100),
            3 /* CV_FONT_HERSHEY_COMPLEX */,
            2,
            new Scalar(255, 0, 0, 255),
            3);

        // Core.line(cflowmap, new Point(x,y), new Point(i,j), new Scalar(0,0,255), 4);
        // Core.circle(cflowmap, new Point(i,j), 2, new Scalar(0,0,255), -1);
        // Core.circle(cflowmap, new Point(x,y), 2, color, -1);
        // System.out.print("fxy"+i+" "+j+"\n");

        // Core.circle(cflowmap, new Point(cflowmap.width(), 50), 5, new Scalar(255,255,255), -1);
        if (i < (cflowmap.width() / 2) && i != 0) {
          Core.circle(cflowmap, new Point(i, j), 2, new Scalar(0, 0, 255), -1);
          Core.putText(
              cflowmap,
              "LEFT",
              new Point(10, 100),
              3 /* CV_FONT_HERSHEY_COMPLEX */,
              2,
              new Scalar(255, 255, 255, 255),
              3);

        } else if (i > (cflowmap.width() / 2) && i != x) {
          Core.circle(cflowmap, new Point(i, j), 2, new Scalar(0, 0, 255), -1);
          Core.putText(
              cflowmap,
              "RIGHT",
              new Point(10, 100),
              3 /* CV_FONT_HERSHEY_COMPLEX */,
              2,
              new Scalar(255, 255, 255, 255),
              3);
        }
      }
    }
  }
  @Override
  public void apply(Mat src, Mat dst) {
    Core.split(src, mChannels);

    final Mat r = mChannels.get(0);
    final Mat g = mChannels.get(1);
    final Mat b = mChannels.get(2);

    Core.min(b, r, b);
    Core.min(b, g, b);

    Core.merge(mChannels, dst);
  }
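The apply() above overwrites the third channel with the per-pixel minimum of all three colour channels before merging. A hedged standalone sketch of the same minimum computed into a single-channel image (OpenCV 3.x+ naming, the usual org.opencv imports assumed, file names are placeholders):

Mat src = Imgcodecs.imread("input.jpg");
List<Mat> ch = new ArrayList<Mat>();
Core.split(src, ch);

// per-pixel minimum across the three channels
Mat minChannel = new Mat();
Core.min(ch.get(0), ch.get(1), minChannel);
Core.min(minChannel, ch.get(2), minChannel);
Imgcodecs.imwrite("min_channel.jpg", minChannel);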
Example #4
File: FdView.java  Project: nagyist/XFace
  private double match_eye(Rect area, Mat mTemplate, int type) {
    Point matchLoc;
    Mat mROI = mGray.submat(area);
    // matchTemplate runs against the ROI, so the result size follows mROI, not mGray
    int result_cols = mROI.cols() - mTemplate.cols() + 1;
    int result_rows = mROI.rows() - mTemplate.rows() + 1;
    if (mTemplate.cols() == 0 || mTemplate.rows() == 0) {
      return 0.0;
    }
    // Mat takes (rows, cols, type)
    mResult = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    switch (type) {
      case TM_SQDIFF:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_SQDIFF);
        break;
      case TM_SQDIFF_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_SQDIFF_NORMED);
        break;
      case TM_CCOEFF:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCOEFF);
        break;
      case TM_CCOEFF_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCOEFF_NORMED);
        break;
      case TM_CCORR:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCORR);
        break;
      case TM_CCORR_NORMED:
        Imgproc.matchTemplate(mROI, mTemplate, mResult, Imgproc.TM_CCORR_NORMED);
        break;
    }

    Core.MinMaxLocResult mmres = Core.minMaxLoc(mResult);

    if (type == TM_SQDIFF || type == TM_SQDIFF_NORMED) {
      matchLoc = mmres.minLoc;
    } else {
      matchLoc = mmres.maxLoc;
    }

    Point matchLoc_tx = new Point(matchLoc.x + area.x, matchLoc.y + area.y);
    Point matchLoc_ty =
        new Point(matchLoc.x + mTemplate.cols() + area.x, matchLoc.y + mTemplate.rows() + area.y);

    Core.rectangle(mRgba, matchLoc_tx, matchLoc_ty, new Scalar(255, 255, 0, 255));

    if (type == TM_SQDIFF || type == TM_SQDIFF_NORMED) {
      return mmres.maxVal;
    } else {
      return mmres.minVal;
    }
  }
 private void displayMarkersDebug(Mat imgMat, Scalar contourColor, Scalar codesColor) {
   ArrayList<MatOfPoint> components = new ArrayList<MatOfPoint>();
   Mat hierarchy = new Mat();
   DtouchMarker marker = new DtouchMarker();
   boolean markerFound = findMarkers(imgMat, marker, components, hierarchy);
   if (markerFound) {
     String code = codeArrayToString(marker.getCode());
     Point codeLocation = new Point(imgMat.cols() / 4, imgMat.rows() / 8);
     Core.putText(mRgba, code, codeLocation, Core.FONT_HERSHEY_COMPLEX, 1, codesColor, 3);
     Imgproc.drawContours(
         mRgba,
         components,
         marker.getComponentIndex(),
         contourColor,
         3,
         8,
         hierarchy,
         2,
         new Point(0, 0));
   }
   if (components != null) components.clear();
   if (hierarchy != null) hierarchy.release();
   components = null;
   hierarchy = null;
 }
 private void displayRectOnImageSegment(Mat imgMat, boolean markerFound) {
   Scalar color = null;
   if (markerFound) color = new Scalar(0, 255, 0, 255);
   else color = new Scalar(255, 160, 36, 255);
   Rect rect = calculateImageSegmentArea(imgMat);
   Core.rectangle(imgMat, rect.tl(), rect.br(), color, 3, Core.LINE_AA, 0);
 }
Example #7
  public static void main(String[] args) {
    try {

      System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
      Mat source = Imgcodecs.imread("test_image.jpg", 0);

      Mat destination = new Mat(source.rows(), source.cols(), source.type());
      Imgproc.GaussianBlur(source, source, new Size(45, 45), 0);
      Imgproc.adaptiveThreshold(
          source, source, 255, Imgproc.ADAPTIVE_THRESH_MEAN_C, Imgproc.THRESH_BINARY, 75, 10);
      Core.bitwise_not(source, source);

      // Line detection (cvtColor needs an allocated destination Mat, not null)
      Mat img2 = new Mat();
      Imgproc.cvtColor(source, img2, Imgproc.COLOR_GRAY2RGB);

      Mat img3 = new Mat();
      Imgproc.cvtColor(source, img3, Imgproc.COLOR_GRAY2RGB);

      MatOfInt4 lines = new MatOfInt4();
      // Imgproc.HoughLines(img, lines, rho, theta, threshold);
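      // A hedged sketch of one way to fill in the commented-out detection step
      // (probabilistic Hough transform); the threshold/length/gap values are
      // illustrative, not tuned.
      Imgproc.HoughLinesP(source, lines, 1, Math.PI / 180, 80, 30, 10);
      for (int i = 0; i < lines.rows(); i++) {
        double[] l = lines.get(i, 0);
        Imgproc.line(
            img2, new Point(l[0], l[1]), new Point(l[2], l[3]), new Scalar(0, 0, 255), 2);
      }
      Imgcodecs.imwrite("lines.jpg", img2); // placeholder output name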

      // Write to File
      Imgcodecs.imwrite("gaussian.jpg", source);
      System.out.println("Success!");
    } catch (Exception e) {
      System.out.println("Error has occurred: " + e.getMessage());
    }
  }
Example #8
  private void createFigureBitmap() {
    if (mFigureBitmap != null) {
      mFigureBitmap.recycle();
    }
    // Prepare the bitmap
    String targetPngFileUri = Camera72Utils.getTargetPngFileFullPath(mDirectory, mCurTarget);
    mFigureBitmap = BitmapFactory.decodeFile(targetPngFileUri, mOpt);
    // If the figure has not been extracted yet
    if (mFigureBitmap == null) {
      Utils.showToast(mContext, R.string.not_extract_message);
      String targetJpgFileUri = Camera72Utils.getFgJpgFileFullPath(mDirectory, mCurTarget);
      mFigureBitmap = BitmapFactory.decodeFile(targetJpgFileUri, mOpt);
    }
    Log.i(TAG, "targetPngFile = " + targetPngFileUri);

    // Prepare an OpenCV Mat object
    Mat mat = new Mat(mFigureBitmap.getHeight(), mFigureBitmap.getWidth(), CvType.CV_8UC4);
    org.opencv.android.Utils.bitmapToMat(mFigureBitmap, mat);

    // Adjust the brightness with OpenCV
    Core.convertScaleAbs(mat, mat, mCvBrightScalar, 0);

    // Apply a Gaussian blur with OpenCV
    Imgproc.GaussianBlur(mat, mat, mCvGaussianSize, 0);

    // Convert back to a Bitmap
    org.opencv.android.Utils.matToBitmap(mat, mFigureBitmap);
  }
Example #9
  public static void main(String[] args) {

    // Specify the input image path and the output file
    String inputImagePath =
        identificate.class.getClassLoader().getResource("hf.jpg").getPath().substring(1);
    String outputImageFile = "identificate.png";

    String xmlPath =
        identificate
            .class
            .getClassLoader()
            .getResource("cascade_storage.xml")
            .getPath()
            .substring(1);
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    CascadeClassifier faceDetector = new CascadeClassifier(xmlPath);
    Mat image = Highgui.imread(inputImagePath);
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Draw a rectangle at each detected face
    for (Rect rect : faceDetections.toArray()) {
      Core.rectangle(
          image,
          new Point(rect.x, rect.y),
          new Point(rect.x + rect.width, rect.y + rect.height),
          new Scalar(0, 0, 255));
    }

    // Write the result to a file
    Highgui.imwrite(outputImageFile, image);

    System.out.print("\nOK!");
  }
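This example uses the OpenCV 2.4 API (Highgui, Core.rectangle). Under OpenCV 3.x and later the same steps would read roughly as below (a hedged sketch reusing the variables above; image I/O moved to Imgcodecs and drawing functions to Imgproc):

    Mat image = Imgcodecs.imread(inputImagePath);
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);
    for (Rect rect : faceDetections.toArray()) {
      Imgproc.rectangle(
          image,
          new Point(rect.x, rect.y),
          new Point(rect.x + rect.width, rect.y + rect.height),
          new Scalar(0, 0, 255));
    }
    Imgcodecs.imwrite(outputImageFile, image);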
Example #10
 public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
   frame = inputFrame.rgba();
   outputFrame = frame.clone();
   if (homography != null) {
     if (markerValue != -1 && markerValue != oldMarkerValue) {
       oldMarkerValue = markerValue;
       outputLogo = logo.clone();
       Core.putText(
           outputLogo,
           Integer.toString(markerValue),
           new Point(5, 505),
           Core.FONT_HERSHEY_SIMPLEX,
           5,
           new Scalar(255, 0, 0),
           5);
     }
     Imgproc.warpPerspective(
         outputLogo,
         outputFrame,
         homography,
         new Size(WIDTH, HEIGHT),
         Imgproc.INTER_NEAREST,
         Imgproc.BORDER_TRANSPARENT,
         new Scalar(0));
   }
   return outputFrame;
 }
  @Override
  public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    // Convert to grayscale for the cascade classifier (the variable name suggests
    // COLOR_RGBA2GRAY was intended rather than COLOR_RGBA2RGB)
    Imgproc.cvtColor(mRgba, grayScaleImage, Imgproc.COLOR_RGBA2GRAY);

    MatOfRect faces = new MatOfRect();

    // detect faces
    if (cascadeClassifier != null) {
      cascadeClassifier.detectMultiScale(
          grayScaleImage,
          faces,
          1.1,
          2,
          2,
          new Size(absoluteFaceSize, absoluteFaceSize),
          new Size());
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
      Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), new Scalar(0, 255, 0, 255), 3);

    if (facesArray.length > 0) {
      facesInASecond.add(true);
    } else {
      facesInASecond.add(false);
    }

    return mRgba;
  }
Example #12
 /**
  * Calculates the final resistance value for each Resistor object, based on its classified band
  * colours, and displays all values on the top right frame of the GUI.
  *
  * @param resistorList The list of Resistors to process. Each Resistor must have 3 associated
  *     ColourBands
  */
 private void calculateAndDisplayResistanceValues(List<Resistor> resistorList) {
   for (int x = 0; x < resistorList.size(); x++) {
     // ignore resistors which do not have 3 bands
     if (resistorList.get(x).bands.size() == 3) {
       Resistor r = resistorList.get(x);
       r.orderBands();
       r.calculateFinalResistanceValue(true);
       if (r.isValid) {
         if (SEARCH_STRING == null
             || SEARCH_STRING.length() == 0
             || r.finalValueHR.equals(SEARCH_STRING)) {
           // draw resistance value to screen
           Core.putText(
               finalDisplayImg,
               r.finalValueHR,
               r.center,
               Core.FONT_HERSHEY_PLAIN,
               2,
               new Scalar(255, 0, 0),
               2);
            // TODO: to try and reduce 'noise' from changing values, keep the value for a given
            // point until it changes.
         }
       }
     }
     paintTR(finalDisplayImg);
   }
 }
Example #13
 @Override
 public Result process(CvPipeline pipeline) throws Exception {
   Mat mat = pipeline.getWorkingImage();
   Mat mask = mat.clone();
   Mat masked = mat.clone();
   Scalar color = FluentCv.colorToScalar(Color.black);
   mask.setTo(color);
   masked.setTo(color);
   Core.inRange(
       mat,
       new Scalar(hueMin, saturationMin, valueMin),
       new Scalar(hueMax, saturationMax, valueMax),
       mask);
   Core.bitwise_not(mask, mask);
   mat.copyTo(masked, mask);
   return new Result(masked);
 }
Example #14
 public void dibujarRegiones() {
   // **** Draw the box for each region (for debugging) ****
   // L: left
   Point pt1 = new Point(0, 0);
   Point pt2 = new Point(mRgba.width() / 4, mRgba.height());
   Core.rectangle(mRgba, pt1, pt2, WHITE);
   // C: center-top
   Point pt3 = new Point(mRgba.width() / 4, 0);
   Point pt4 = new Point((mRgba.width() / 4) * 3, (mRgba.height() / 4) * 3);
   Core.rectangle(mRgba, pt3, pt4, WHITE);
   // N: center-bottom
   Point pt5 = new Point(mRgba.width() / 4, (mRgba.height() / 4) * 3);
   Point pt6 = new Point((mRgba.width() / 4) * 3, mRgba.height());
   Core.rectangle(mRgba, pt5, pt6, WHITE);
   // R: right
   Point pt7 = new Point((mRgba.width() / 4) * 3, 0);
   Point pt8 = new Point(mRgba.width(), mRgba.height());
   Core.rectangle(mRgba, pt7, pt8, WHITE);
 }
Example #15
File: FdView.java  Project: nagyist/XFace
  private Mat get_template(CascadeClassifier clasificator, Rect area, int size) {
    Mat template = new Mat();
    Mat mROI = mGray.submat(area);
    MatOfRect eyes = new MatOfRect();
    Point iris = new Point();
    Rect eye_template = new Rect();
    clasificator.detectMultiScale(
        mROI,
        eyes,
        1.15,
        2,
        Objdetect.CASCADE_FIND_BIGGEST_OBJECT | Objdetect.CASCADE_SCALE_IMAGE,
        new Size(30, 30),
        new Size());

    Rect[] eyesArray = eyes.toArray();
    for (int i = 0; i < eyesArray.length; i++) {
      Rect e = eyesArray[i];
      e.x = area.x + e.x;
      e.y = area.y + e.y;
      Rect eye_only_rectangle =
          new Rect(
              (int) e.tl().x,
              (int) (e.tl().y + e.height * 0.4),
              (int) e.width,
              (int) (e.height * 0.6));
      mROI = mGray.submat(eye_only_rectangle);
      Mat vyrez = mRgba.submat(eye_only_rectangle);
      Core.MinMaxLocResult mmG = Core.minMaxLoc(mROI);

      Core.circle(vyrez, mmG.minLoc, 2, new Scalar(255, 255, 255, 255), 2);
      iris.x = mmG.minLoc.x + eye_only_rectangle.x;
      iris.y = mmG.minLoc.y + eye_only_rectangle.y;
      eye_template = new Rect((int) iris.x - size / 2, (int) iris.y - size / 2, size, size);
      Core.rectangle(mRgba, eye_template.tl(), eye_template.br(), new Scalar(255, 0, 0, 255), 2);
      template = (mGray.submat(eye_template)).clone();
      return template;
    }
    return template;
  }
Example #16
  public static void draw3dAxis(
      Mat frame, CameraParameters cp, Scalar color, double height, Mat Rvec, Mat Tvec) {
    //		Mat objectPoints = new Mat(4,3,CvType.CV_32FC1);
    MatOfPoint3f objectPoints = new MatOfPoint3f();
    Vector<Point3> points = new Vector<Point3>();
    points.add(new Point3(0, 0, 0));
    points.add(new Point3(height, 0, 0));
    points.add(new Point3(0, height, 0));
    points.add(new Point3(0, 0, height));
    objectPoints.fromList(points);

    MatOfPoint2f imagePoints = new MatOfPoint2f();
    Calib3d.projectPoints(
        objectPoints, Rvec, Tvec, cp.getCameraMatrix(), cp.getDistCoeff(), imagePoints);
    List<Point> pts = new Vector<Point>();
    Converters.Mat_to_vector_Point(imagePoints, pts);

    Core.line(frame, pts.get(0), pts.get(1), color, 2);
    Core.line(frame, pts.get(0), pts.get(2), color, 2);
    Core.line(frame, pts.get(0), pts.get(3), color, 2);

    Core.putText(frame, "X", pts.get(1), Core.FONT_HERSHEY_SIMPLEX, 0.5, color, 2);
    Core.putText(frame, "Y", pts.get(2), Core.FONT_HERSHEY_SIMPLEX, 0.5, color, 2);
    Core.putText(frame, "Z", pts.get(3), Core.FONT_HERSHEY_SIMPLEX, 0.5, color, 2);
  }
  public static Mat Histogram(Mat im) {

    Mat img = im;

    Mat equ = new Mat();
    img.copyTo(equ);
    // Imgproc.blur(equ, equ, new Size(3, 3));

    Imgproc.cvtColor(equ, equ, Imgproc.COLOR_BGR2YCrCb);
    List<Mat> channels = new ArrayList<Mat>();
    Core.split(equ, channels);
    Imgproc.equalizeHist(channels.get(0), channels.get(0));
    Core.merge(channels, equ);
    Imgproc.cvtColor(equ, equ, Imgproc.COLOR_YCrCb2BGR);

    Mat gray = new Mat();
    Imgproc.cvtColor(equ, gray, Imgproc.COLOR_BGR2GRAY);
    Mat grayOrig = new Mat();
    Imgproc.cvtColor(img, grayOrig, Imgproc.COLOR_BGR2GRAY);
    System.out.println("Histogram work ///");
    // NOTE: 'gray' (the equalized grayscale) is never used; the grayscale of the
    // original, un-equalized image is what gets returned.
    return grayOrig;
  }
  public void templateMatching() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    int match_method = 5;
    int max_Trackbar = 5;
    Mat data = Highgui.imread("images/training_data/1" + "/data (" + 1 + ").jpg");
    Mat temp = Highgui.imread("images/template.jpg");
    Mat img = data.clone();

    int result_cols = img.cols() - temp.cols() + 1;
    int result_rows = img.rows() - temp.rows() + 1;
    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    Imgproc.matchTemplate(img, temp, result, match_method);
    Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    double minVal;
    double maxVal;
    Point minLoc;
    Point maxLoc;
    Point matchLoc;
    // minMaxLoc( result, &minVal, &maxVal, &minLoc, &maxLoc, Mat() );
    Core.MinMaxLocResult res = Core.minMaxLoc(result);

    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
      matchLoc = res.minLoc;
    } else {
      matchLoc = res.maxLoc;
    }

    // / Show me what you got
    Core.rectangle(
        img,
        matchLoc,
        new Point(matchLoc.x + temp.cols(), matchLoc.y + temp.rows()),
        new Scalar(0, 255, 0));

    // Save the visualized detection.
    Highgui.imwrite("images/samp.jpg", img);
  }
Example #19
  private Mat detectObjectLiteVersion(Mat img) {
    try {
      // writeLock.lock();

      try {
        img_scene = img.clone();
      } finally {
        // writeLock.unlock();
      }

      isRun = true;

      // detectObject();

      /* if (listPointScene != null && !listPointScene.isEmpty())
      {
          for (int i = 0; i < listPointScene.size(); i++)
          {
              Core.circle(img, listPointScene.get(i), 5, Scalar.all(1), 2);

          }
      }*/

      Scalar scalar = new Scalar(0, 255, 0);

      // Log.e("POINT CAP", p0.toString() + p1.toString());
      Core.line(img, p0, p1, scalar, 4);
      Core.line(img, p1, p2, scalar, 4);
      Core.line(img, p2, p3, scalar, 4);
      Core.line(img, p3, p0, scalar, 4);

      return img;

    } catch (Exception ex) {
      Log.e("", "");
      return null;
    }
  }
  private static Mat rodr2quat(Mat rodr) {
    // Rotation angle is the norm of the Rodrigues vector (assumed non-zero;
    // t == 0 would cause a division by zero below).
    double t = Core.norm(rodr);
    double[] r = new double[3];
    rodr.get(0, 0, r);

    double[] quat = {
      Math.cos(t / 2),
      Math.sin(t / 2) * r[0] / t,
      Math.sin(t / 2) * r[1] / t,
      Math.sin(t / 2) * r[2] / t
    };
    Mat quatm = new Mat(4, 1, CvType.CV_64F);
    quatm.put(0, 0, quat);
    return quatm;
  }
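A small, hypothetical check for rodr2quat, called from within the same class: a rotation of pi/2 about the Z axis should give a quaternion of roughly (0.7071, 0, 0, 0.7071) in the (w, x, y, z) order used above.

    Mat rodr = new Mat(3, 1, CvType.CV_64F);
    rodr.put(0, 0, 0.0, 0.0, Math.PI / 2); // axis (0,0,1), angle pi/2
    Mat quat = rodr2quat(rodr);
    System.out.println(quat.dump()); // ~ [0.7071; 0; 0; 0.7071]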
  public static Mat getCCH(Mat image) {
    ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();
    Imgproc.findContours(
        image, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);

    Mat chainHistogram = Mat.zeros(1, 8, CvType.CV_32F);
    int n = 0;
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    for (MatOfPoint contour : contours) {

      // get the freeman chain code from the contours
      int rows = contour.rows();
      // System.out.println("\nrows"+rows+"\n"+contour.dump());
      int direction = 7;
      n += rows - 1;
      for (int i = 1; i < rows; i++) {
        // get the previous point (note: contour.get(...) returns {x, y}, so these
        // indices swap x and y; the swap is applied consistently below)
        double x1 = contour.get(i - 1, 0)[1];
        double y1 = contour.get(i - 1, 0)[0];

        // get the current point
        double x2 = contour.get(i, 0)[1];
        double y2 = contour.get(i, 0)[0];

        if (x2 == x1 && y2 == y1 + 1) direction = 0;
        else if (x2 == x1 - 1 && y2 == y1 + 1) direction = 1;
        else if (x2 == x1 - 1 && y2 == y1) direction = 2;
        else if (x2 == x1 - 1 && y2 == y1 - 1) direction = 3;
        else if (x2 == x1 && y2 == y1 - 1) direction = 4;
        else if (x2 == x1 + 1 && y2 == y1 - 1) direction = 5;
        else if (x2 == x1 + 1 && y2 == y1) direction = 6;
        else if (x2 == x1 + 1 && y2 == y1 + 1) direction = 7;
        else System.out.print("err");
        double counter = chainHistogram.get(0, direction)[0];
        chainHistogram.put(0, direction, ++counter);
        System.out.print(direction);
      }
    }
    System.out.println("\n" + chainHistogram.dump());
    Scalar alpha = new Scalar(n); // the factor
    Core.divide(chainHistogram, alpha, chainHistogram);
    System.out.println("\nrows=" + n + " " + chainHistogram.dump());
    return chainHistogram;
  }
  /**
   * @param source the input BGR image
   * @param delta the y-derivative order passed to Imgproc.Sobel (0 keeps only the x-gradient)
   * @return the gradient image scaled to 8-bit
   */
  public static Mat Sobel(Mat source, int delta) {

    Mat grey = new Mat();
    Imgproc.cvtColor(source, grey, Imgproc.COLOR_BGR2GRAY);
    Mat sobelx = new Mat();
    Imgproc.Sobel(grey, sobelx, CvType.CV_32F, 1, delta);

    double minVal, maxVal;
    Core.MinMaxLocResult minMaxLocResult = Core.minMaxLoc(sobelx);
    minVal = minMaxLocResult.minVal;
    maxVal = minMaxLocResult.maxVal;

    Mat draw = new Mat();
    sobelx.convertTo(
        draw, CvType.CV_8U, 255.0 / (maxVal - minVal), -minVal * 255.0 / (maxVal - minVal));
    return draw;
  }
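A minimal, hypothetical driver for the Sobel helper above, assuming the OpenCV 3.x+ binding and placeholder file names:

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    Mat src = Imgcodecs.imread("input.jpg");
    Mat grad = Sobel(src, 0); // dy = 0: x-derivative only
    Imgcodecs.imwrite("sobel_x.jpg", grad);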
Example #23
 public void onEvent(WarpDrawEvent event) {
   synchronized (this) {
     if ((mode == Mode.SAMPLEWARPPLANE || mode == Mode.SAMPLEWARPGLASS) && useSample) {
       if (!isSetup) setupMatrices();
       double hGlassToSmall[] = getHGlassToSmall(sampleBGR.height(), sampleBGR.width());
       if (hGlassToSmall == null) {
         Log.w(TAG, "Warp: Bad size");
         return;
       }
       double circleCenter[] = {event.getX(), event.getY(), 1};
       double circleCenterSmall[] = HMultPoint(hGlassToSmall, circleCenter);
       Core.circle(
           sampleBGR,
           new Point(circleCenterSmall[0], circleCenterSmall[1]),
           event.getRadius(),
           new Scalar(event.getR(), event.getG(), event.getB()));
     }
   }
 }
Example #24
  /**
   * Generates a mask of all resistors present in the given Mat. Also displays this mask in the
   * Bottom Left frame of the GUI.
   *
   * @param imgCap The Mat image to generate the mask for
   * @param type The threshold operation type
   * @return The mask as a Mat
   */
  private Mat generateResistorMask(Mat imgCap, int type) {
    Mat imgHSV = new Mat();
    Mat satImg = new Mat();

    // convert the input image from BGR to HSV
    Imgproc.cvtColor(imgCap, imgHSV, Imgproc.COLOR_BGR2HSV);

    ArrayList<Mat> channels = new ArrayList<Mat>();
    Core.split(imgHSV, channels);
    // extract the saturation channel
    satImg = channels.get(1);

    // remove the background and the resistor leads (combined with previous blurring)
    // thresh ~86
    Imgproc.threshold(satImg, satImg, RESISTOR_MASK_THRESHOLD, 255, type);

    paintBL(satImg);

    return satImg;
  }
  /**
   * calculates the center of mass using <code>colorForTrackingHSV</code> and <code>colorRadius
   * </code> as radius (in HSV-color space)
   *
   * @param hsv the frame from which the center of mass should be calculated
   * @return the center of mass as a point in pixel coordinates (i.e. integer)
   */
  public Point calcCenterOfMass(Mat hsv) {
    blackWhiteMask = new Mat();
    Core.inRange(hsv, lowerBound, upperBound, blackWhiteMask);

    dilatedMask = new Mat();
    Imgproc.dilate(blackWhiteMask, dilatedMask, new Mat());

    contour = new ArrayList<MatOfPoint>();
    Mat mHierarchy = new Mat();
    Mat tempDilatedMask = dilatedMask.clone();
    Imgproc.findContours(
        tempDilatedMask, contour, mHierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE);

    boundingRect = Imgproc.boundingRect(findLargestContour(contour));

    int centerOfMassX = boundingRect.x + boundingRect.width / 2;
    int centerOfMassY = boundingRect.y + boundingRect.height / 2;

    Point centerOfMass = new Point(centerOfMassX, centerOfMassY);
    trackPath.add(centerOfMass);
    return centerOfMass;
  }
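findLargestContour() is referenced above but not shown; a plausible minimal implementation (selecting by contour area) might look like this sketch:

  private MatOfPoint findLargestContour(List<MatOfPoint> contours) {
    MatOfPoint largest = contours.get(0);
    double maxArea = 0;
    for (MatOfPoint c : contours) {
      double area = Imgproc.contourArea(c); // area enclosed by the contour
      if (area > maxArea) {
        maxArea = area;
        largest = c;
      }
    }
    return largest;
  }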
Example #26
  /**
   * Creates Resistor objects for all resistors extracted from given contours. Optionally, also
   * displays a bounding rectangle for all contours in the top left frame of the GUI.
   *
   * @param contours The contours defining the resistors
   * @param image The image from which the contours were extracted
   * @param showBoundingRect If true draws a bounding rectangle for each contour
   * @return A list of Resistor objects
   */
  private List<Resistor> extractResistorsFromContours(
      List<MatOfPoint> contours, Mat image, boolean showBoundingRect) {
    List<Mat> extractedResistors = new ArrayList<Mat>();
    List<Rect> boundingRect = new ArrayList<Rect>();
    List<Resistor> resistors = new ArrayList<Resistor>();

    for (int i = 0; i < contours.size(); i++) {
      // bounding rectangle
      boundingRect.add(Imgproc.boundingRect(contours.get(i)));
      Mat mask = Mat.zeros(image.size(), CvType.CV_8U);
      Imgproc.drawContours(mask, contours, i, new Scalar(255), Core.FILLED);

      Mat contourRegion;
      Mat imageROI = new Mat();
      image.copyTo(imageROI, mask);
      contourRegion = new Mat(imageROI, boundingRect.get(i));
      extractedResistors.add(contourRegion);

      // the center of the resistor as a point within the original captured image
      Point resistorCenterPoint = findCenter(contours.get(i));

      // create a new resistor entry
      Resistor r = new Resistor(resistorCenterPoint, contourRegion);
      resistors.add(r);
    }

    if (showBoundingRect) {
      Mat drawing = new Mat();
      image.copyTo(drawing);
      for (int i = 0; i < contours.size(); i++) {
        Core.rectangle(
            drawing, boundingRect.get(i).tl(), boundingRect.get(i).br(), new Scalar(0, 0, 255), 2);
      }
      paintTL(drawing);
    }

    return resistors;
  }
Example #27
  public Mat onCameraFrame(Mat inputFrame) {

    if (takeScanBase) {
      if (scanBase != null) {
        scanBase.release();
      }
      scanBase = new Mat();
      scanningMat = new Mat();

      Imgproc.cvtColor(inputFrame, scanBase, Imgproc.COLOR_RGB2GRAY);
      Imgproc.cvtColor(scanBase, scanningMat, Imgproc.COLOR_GRAY2RGB);
      scanBase.release();

      hasScanBase = true;
      takeScanBase = false;
    }

    if (hasScanBase) {

      Point p = findLaser(inputFrame);
      inputFrame.release();

      if (p != null) {
        if (hasThermal) {
          double temp = mThermal.read();
          data[(int) p.y][(int) p.x] = (float) temp;

          Core.circle(scanningMat, p, 3, new Scalar(255, 0, 0), -3);

        } else {
          // Log.v(TAG, "No thermal!");
        }
      }

      return scanningMat;
    } else return inputFrame;
  }
Example #28
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {

    mRgba = inputFrame.rgba();
    mGray = inputFrame.gray();

    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
      mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
      if (mJavaDetector != null)
        mJavaDetector.detectMultiScale(
            mGray,
            faces,
            1.1,
            2,
            2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
            new Size());
    } else if (mDetectorType == NATIVE_DETECTOR) {
      if (mNativeDetector != null) mNativeDetector.detect(mGray, faces);
    } else {
      Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
      Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    return mRgba;
  }
Example #29
  /* (non-Javadoc)
   * @see java.lang.Runnable#run()
   */
  @Override
  public void run() {
    if (MODE.equals("VIDEO")) {
      Mat capturedImage = new Mat();
      VideoCapture vc = new VideoCapture(DEVICE);
      if (!vc.isOpened()) {
        System.out.println("Capture Failed!");
        return;
      }
      System.out.println("Device " + DEVICE + " opened");
      // set captured resolution
      vc.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, 640);
      vc.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, 480);

      // Manually set exposure
      vc.set(15, -11);
      while (true) {
        vc.read(capturedImage);
        if (capturedImage != null) {
          // flip the image to compensate for camera orientation
          Core.flip(capturedImage, capturedImage, -1);
          capturedImage.copyTo(finalDisplayImg);
          parseImage(capturedImage);
        }
      }
    } else { // STILL IMAGE
      Mat capturedImage = Highgui.imread(IMAGE_FILEPATH);
      while (true) {
        if (needUpdate) {
          capturedImage.copyTo(finalDisplayImg);
          parseImage(capturedImage);
          needUpdate = false;
        }
      }
    }
  }
  /**
   * @param inputImg the input frame
   * @param minValue the lower threshold value
   * @param maxValue the maximum value used by the threshold operations
   * @return the extracted foreground image as a Mat
   */
  public static Mat thresholding(Mat inputImg, Integer minValue, Integer maxValue) {

    Mat frame = inputImg;
    // brightness
    // frame.convertTo(frame , -1, 10d * 33 / 100, 0);
    // Imgproc.medianBlur(frame,frame, 17);

    // Core.bitwise_not(frame,frame );

    // Mat frame = new Mat(image.rows(), image.cols(), image.type());

    // frame.convertTo(frame, -1, 10d * 20 / 100, 0);

    Mat hsvImg = new Mat();
    List<Mat> hsvPlanes = new ArrayList<>();
    Mat thresholdImg = new Mat();

    int thresh_type = Imgproc.THRESH_BINARY_INV;

    // if (this.inverse.isSelected())
    // thresh_type = Imgproc.THRESH_BINARY;

    // threshold the image with the average hue value
    // System.out.println("size " +frame.size());
    hsvImg.create(frame.size(), CvType.CV_8U);
    // NOTE: with the cvtColor below commented out, hsvImg holds a single uninitialized
    // channel, so hsvPlanes will not contain real HSV planes at this point.
    // Imgproc.cvtColor(frame, hsvImg, Imgproc.COLOR_BGR2HSV);
    Core.split(hsvImg, hsvPlanes);

    // get the average hue value of the image
    // double threshValue = PreProcessingOperation.getHistAverage(hsvImg, hsvPlanes.get(0));
    // System.out.println(threshValue);
    /*
    if(threshValue > 40){
        maxValue = 160;
    }else{
        maxValue = 40;
    }*/

    //        Imgproc.threshold(hsvPlanes.get(1), thresholdImg, minValue , maxValue , thresh_type);

    Imgproc.blur(thresholdImg, thresholdImg, new Size(27, 27));

    // dilate to fill gaps, erode to smooth edges
    Imgproc.dilate(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 1);
    Imgproc.erode(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 1);

    Imgproc.threshold(thresholdImg, thresholdImg, minValue, maxValue, Imgproc.THRESH_BINARY);

    // create the new image
    Mat foreground = new Mat(frame.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    Core.bitwise_not(thresholdImg, foreground);

    frame.copyTo(foreground, thresholdImg);

    ///////////////////////////////////////////////////////////////////////////////////////
    ///
    ////

    return foreground;
    /*Mat hsvImg = new Mat();
    List<Mat> hsvPlanes = new ArrayList<>();
    Mat thresholdImg = new Mat();
    int thresh_type = Imgproc.THRESH_BINARY_INV;
    // threshold the image with the average hue value
    hsvImg.create(inputImg.size(), CvType.CV_8U);
    Imgproc.cvtColor(inputImg, hsvImg, Imgproc.COLOR_BGR2HSV);
    Core.split(hsvImg, hsvPlanes);
    // get the average hue value of the image
    double threshValue = PreProcessingOperation.getHistAverage(hsvImg, hsvPlanes.get(0));
    Imgproc.threshold(hsvPlanes.get(0), thresholdImg, minValue,
            maxValue, thresh_type);
    Imgproc.blur(thresholdImg, thresholdImg, new Size(3, 3));
    // dilate to fill gaps, erode to smooth edges
    Imgproc.dilate(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 3);
    Imgproc.erode(thresholdImg, thresholdImg, new Mat(), new Point(-1, -1), 1);
    Imgproc.threshold(thresholdImg, thresholdImg, minValue,
            maxValue, Imgproc.THRESH_BINARY);
    // create the new image
    Mat foreground = new Mat(inputImg.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    inputImg.copyTo(foreground, thresholdImg);
    Core.bitwise_not(foreground,foreground);
    return foreground;*/
  }
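For comparison, a condensed, hedged sketch of the HSV-threshold variant preserved in the commented block above (same minValue/maxValue parameters and the usual org.opencv imports; not the author's final pipeline):

  public static Mat thresholdHsv(Mat inputImg, int minValue, int maxValue) {
    Mat hsvImg = new Mat();
    Imgproc.cvtColor(inputImg, hsvImg, Imgproc.COLOR_BGR2HSV);

    List<Mat> hsvPlanes = new ArrayList<>();
    Core.split(hsvImg, hsvPlanes);

    // threshold the hue plane, then clean the mask up morphologically
    Mat mask = new Mat();
    Imgproc.threshold(hsvPlanes.get(0), mask, minValue, maxValue, Imgproc.THRESH_BINARY_INV);
    Imgproc.blur(mask, mask, new Size(3, 3));
    Imgproc.dilate(mask, mask, new Mat(), new Point(-1, -1), 3);
    Imgproc.erode(mask, mask, new Mat(), new Point(-1, -1), 1);

    // copy the foreground pixels onto a white canvas
    Mat foreground = new Mat(inputImg.size(), CvType.CV_8UC3, new Scalar(255, 255, 255));
    inputImg.copyTo(foreground, mask);
    return foreground;
  }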