public static void main(String[] args) {

    // Specify the input image path and the output file
    String inputImagePath =
        identificate.class.getClassLoader().getResource("hf.jpg").getPath().substring(1);
    String outputImageFile = "identificate.png";

    String xmlPath =
        identificate
            .class
            .getClassLoader()
            .getResource("cascade_storage.xml")
            .getPath()
            .substring(1);
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    CascadeClassifier faceDetector = new CascadeClassifier(xmlPath);
    Mat image = Highgui.imread(inputImagePath);
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Draw the detected face locations
    for (Rect rect : faceDetections.toArray()) {
      Core.rectangle(
          image,
          new Point(rect.x, rect.y),
          new Point(rect.x + rect.width, rect.y + rect.height),
          new Scalar(0, 0, 255));
    }

    // Write the result to a file
    Highgui.imwrite(outputImageFile, image);

    System.out.print("\nOK!");
  }
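The snippets on this page use the OpenCV 2.4 Java bindings, where image I/O lives in Highgui and rectangle drawing in Core. Below is a minimal sketch of the same face-detection flow on an OpenCV 3.x or newer build, where those calls moved to Imgcodecs and Imgproc; the class name is illustrative and the file names are simply reused from the example above.

  import org.opencv.core.*;
  import org.opencv.imgcodecs.Imgcodecs;
  import org.opencv.imgproc.Imgproc;
  import org.opencv.objdetect.CascadeClassifier;

  public class IdentificateV3 {
    public static void main(String[] args) {
      System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
      CascadeClassifier faceDetector = new CascadeClassifier("cascade_storage.xml");
      Mat image = Imgcodecs.imread("hf.jpg");         // was Highgui.imread
      MatOfRect faceDetections = new MatOfRect();
      faceDetector.detectMultiScale(image, faceDetections);
      for (Rect rect : faceDetections.toArray()) {
        Imgproc.rectangle(                            // was Core.rectangle
            image,
            new Point(rect.x, rect.y),
            new Point(rect.x + rect.width, rect.y + rect.height),
            new Scalar(0, 0, 255));
      }
      Imgcodecs.imwrite("identificate.png", image);   // was Highgui.imwrite
    }
  }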
Example #2
  public static void main(String[] args) {
    System.loadLibrary("opencv_java2410");

    Mat src = Highgui.imread("img/hx_30.jpg", 0);
    Mat dst = new Mat();

    Imgproc.equalizeHist(src, dst);

    Highgui.imwrite("out/hx_30_src.jpg", src);
    Highgui.imwrite("out/hx_30.jpg", dst);
  }
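Global histogram equalization, as used above, can blow out locally bright regions. Here is a sketch of the adaptive variant (CLAHE), assuming a build whose Java bindings expose Imgproc.createCLAHE (present in recent 2.4.x releases and in 3.x); the clip limit and tile size are commonly quoted defaults, and the output path is made up.

  public static void main(String[] args) {
    System.loadLibrary("opencv_java2410");
    Mat src = Highgui.imread("img/hx_30.jpg", 0); // load as 8-bit grayscale
    Mat dst = new Mat();
    CLAHE clahe = Imgproc.createCLAHE(2.0, new Size(8, 8)); // org.opencv.imgproc.CLAHE
    clahe.apply(src, dst);
    Highgui.imwrite("out/hx_30_clahe.jpg", dst);
  }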
 public static void main(String[] args) {
   System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
   Mat mat = Card.findCardNumber("test/1.jpg");
   String filename = "test/card/img_card_number.png";
   System.out.println(String.format("Writing %s", filename));
   Highgui.imwrite(filename, mat);
 }
    @Override
    protected Void doInBackground(Void... params) {
      publishProgress(0);
      Mat hsvImg = heatmap(data, progressDialog);
      Mat finishedImage = new Mat();
      Imgproc.cvtColor(hsvImg, finishedImage, Imgproc.COLOR_HSV2BGR);

      File mediaStorageDir =
          new File(Environment.getExternalStorageDirectory().getPath(), "images/Colored_Images");

      if (!mediaStorageDir.exists()) {
        if (!mediaStorageDir.mkdirs()) {
          Log.e(TAG, "failed to create directory");
          return null;
        }
      }

      String timeStamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
      Log.v(
          TAG,
          "SAVING: " + mediaStorageDir.getPath() + File.separator + "scan_" + timeStamp + ".jpg");
      Highgui.imwrite(
          mediaStorageDir.getPath() + File.separator + "scan_" + timeStamp + ".jpg", finishedImage);

      return null;
    }
Example #5
  private static Mat findLargestRectangle(Mat original_image) {
    Mat imgSource = original_image.clone();

    // convert the image to grayscale
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);

    // detect edges with Canny (8-bit output)
    Imgproc.Canny(imgSource, imgSource, 50, 50);

    // apply a Gaussian blur to smooth the edge map
    Imgproc.GaussianBlur(imgSource, imgSource, new Size(5, 5), 5);

    // find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(
        imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxArea = -1;
    MatOfPoint temp_contour = contours.get(0); // start from the first contour
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    List<MatOfPoint> largest_contours = new ArrayList<MatOfPoint>();
    for (int idx = 0; idx < contours.size(); idx++) {
      temp_contour = contours.get(idx);
      double contourarea = Imgproc.contourArea(temp_contour);
      // compare this contour to the previous largest contour found
      if (contourarea > maxArea) {
        // check if this contour is a square
        MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
        int contourSize = (int) temp_contour.total();
        Imgproc.approxPolyDP(new_mat, approxCurve, contourSize * 0.05, true);
        if (approxCurve.total() == 4) {
          maxArea = contourarea;
          largest_contours.add(temp_contour);
        }
      }
    }
    // assumes at least one quadrilateral contour was found
    MatOfPoint temp_largest = largest_contours.get(largest_contours.size() - 1);
    largest_contours = new ArrayList<MatOfPoint>();

    largest_contours.add(temp_largest);

    // Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BayerBG2RGB);
    Imgproc.drawContours(original_image, largest_contours, -1, new Scalar(0, 255, 0), 10);

    // Mat perspectiveTransform = new Mat(3, 3, CvType.CV_32FC1);
    // Imgproc.warpPerspective(original_image, imgSource,
    // perspectiveTransform, new Size(300,300));

    Highgui.imwrite(output, original_image);

    // create the new image here using the largest detected square

    // Toast.makeText(getApplicationContext(), "Largest Contour: ",
    // Toast.LENGTH_LONG).show();

    return imgSource;
  }
 public static Mat findCardNumber(String path) {
   Mat mat = Highgui.imread(path);
   int x = 0;
   // the card number sits in a horizontal band from roughly 30/54 to 37/54 of the card height
   int y = (int) (mat.height() * ((double) 30 / 54));
   int width = mat.cols();
   int height = (int) (mat.height() * ((double) 7 / 54));
   return mat.submat(new Rect(x, y, width, height));
 }
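A typical next step before running OCR on the cropped band is binarisation. The following is a minimal sketch building on findCardNumber above; the Otsu threshold and the helper name are illustrative and not part of the original Card class.

  public static Mat prepareCardNumberForOcr(String path) {
    Mat band = findCardNumber(path);
    Mat gray = new Mat();
    Imgproc.cvtColor(band, gray, Imgproc.COLOR_BGR2GRAY);
    Mat binary = new Mat();
    Imgproc.threshold(gray, binary, 0, 255, Imgproc.THRESH_BINARY | Imgproc.THRESH_OTSU);
    return binary;
  }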
 private void setImagesForDatabaseEdit() {
   for (int i = 0; i < faceImages.size(); i++) {
     Mat m = Highgui.imread(thisPerson.getFacesFolderPath() + "/" + i + ".jpg");
     if (m != null && !m.empty()) { // imread returns an empty Mat, not null, when the file is missing
       onFaceCaptured(m);
     }
   }
 }
  // OpenCV code
  private void modifyImage(String fileName) {
    // Create a face detector from the cascade file
    CascadeClassifier faceDetector = new CascadeClassifier("haarcascade_frontalface_alt.xml");
    Mat image = Highgui.imread(fileName);

    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    // Blur each face
    for (Rect rect : faceDetections.toArray()) {
      Mat faceArea = image.submat(rect);
      Imgproc.blur(faceArea, faceArea, new Size(30, 30));
    }
    // Save the modified image
    Highgui.imwrite("edited_" + fileName, image);
  }
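Pixelating instead of blurring is a common variation on the face-anonymising loop above. A hypothetical helper that could be called in place of the Imgproc.blur loop is sketched below; the 16-pixel block size is arbitrary.

  private void pixelateFaces(Mat image, MatOfRect faceDetections) {
    for (Rect rect : faceDetections.toArray()) {
      Mat faceArea = image.submat(rect);
      Mat small = new Mat();
      Imgproc.resize(
          faceArea, small, new Size(Math.max(1, rect.width / 16), Math.max(1, rect.height / 16)));
      Mat big = new Mat();
      Imgproc.resize(small, big, faceArea.size(), 0, 0, Imgproc.INTER_NEAREST);
      big.copyTo(faceArea); // write the pixelated block back into the original image
    }
  }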
  public String writePhotoFileToSDCard(
      String filePhotoName, Mat imageMat, File sdCardAbsolutePathFile) {
    File file = new File(sdCardAbsolutePathFile, filePhotoName);

    filePhotoName = file.toString();
    Highgui.imwrite(filePhotoName, imageMat);

    return (filePhotoName);
  }
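Highgui.imwrite also accepts encoder parameters. Here is a minimal sketch of a variant of the method above that passes an explicit JPEG quality; the method name and the quality parameter are illustrative, and it needs org.opencv.core.MatOfInt in addition to the imports already used.

  public String writePhotoFileToSDCardWithQuality(
      String filePhotoName, Mat imageMat, File sdCardAbsolutePathFile, int jpegQuality) {
    File file = new File(sdCardAbsolutePathFile, filePhotoName);
    MatOfInt params = new MatOfInt(Highgui.CV_IMWRITE_JPEG_QUALITY, jpegQuality);
    Highgui.imwrite(file.toString(), imageMat, params);
    return file.toString();
  }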
 public SaliencyResult saliencyalgorithmInterface(ImageObj imgobj, String method) {
   float min = Float.MAX_VALUE;
   float max = -Float.MAX_VALUE; // Float.MIN_VALUE is the smallest positive float, not the most negative
   String imgpath = imgobj.getSourcePath();
   int k_num = imgobj.getK_num();
   SaliencyResult result = new SaliencyResult();
   Mat img = Highgui.imread(imgpath, Highgui.CV_LOAD_IMAGE_GRAYSCALE);
   Mat saliencyMap = new Mat();
   saliencyMap.create(img.rows(), img.cols(), CvType.CV_16U);
   int HistGram[] = new int[256];
   int Gray[] = new int[img.cols() * img.rows()];
   int Dist[] = new int[256];
   float DistMap[] = new float[img.rows() * img.cols()];
   // build the gray-level histogram and a flattened copy of the image
   for (int row = 0; row < img.rows(); row++) {
     int CurIndex = row * img.cols();
     for (int col = 0; col < img.cols(); col++) {
       HistGram[(int) (img.get(row, col)[0])]++;
       Gray[CurIndex] = (int) (img.get(row, col)[0]);
       CurIndex++;
     }
   }
   // Dist[Y] = sum over X of |Y - X| * HistGram[X]: the global contrast of gray level Y
   for (int Y = 0; Y < 256; Y++) {
     int Value = 0;
     for (int X = 0; X < 256; X++) Value += Math.abs(Y - X) * HistGram[X];
     Dist[Y] = Value;
   }
   for (int row = 0; row < img.rows(); row++) {
     int CurIndex = row * img.cols();
     for (int col = 0; col < img.cols(); col++) {
       DistMap[CurIndex] = Dist[Gray[CurIndex]];
       if (DistMap[CurIndex] < min) min = DistMap[CurIndex];
       if (DistMap[CurIndex] > max) max = DistMap[CurIndex];
       CurIndex++;
     }
   }
   for (int row = 0; row < img.rows(); row++) {
     int CurIndex = row * img.cols();
     for (int col = 0; col < img.cols(); col++) {
       saliencyMap.put(row, col, partTwo((DistMap[CurIndex] - min) / (max - min) * 255));
       CurIndex++;
     }
   }
   new findMarkUtil();
   int nums[] = null;
   if ("kmeans".equals(method)) { // compare String contents with equals(), not ==
     nums = findMarkUtil.findMarkUtil_kmeans(saliencyMap, k_num, 255, 0, 5);
   } else if ("random".equals(method)) {
     nums = findMarkUtil.findMarkUtil_random(saliencyMap, k_num, 255);
   }
   result.setK_num(k_num);
   result.setSource(imgpath);
   result.setResult(nums);
   result.setSaliency(saliencyMap);
   return result;
 }
    @Override
    protected Void doInBackground(final Void... unused) {

      Wrapper wrapper = new Wrapper();

      try {
        serverAddress = InetAddress.getByName(Constants.SERVER_IP);
        serverSocket = new Socket();
        serverSocket.connect(
            new InetSocketAddress(Constants.SERVER_IP, Constants.SERVER_PORT), 5000);
      } catch (Exception e) {
        e.printStackTrace();
      }

      wrapper.type = 0;
      wrapper.status = serverSocket.isConnected();
      publishProgress(wrapper);

      try {
        Thread.sleep(500);

        dataInputStream = new DataInputStream(serverSocket.getInputStream());
        dataOutputStream = new DataOutputStream(serverSocket.getOutputStream());

        wrapper.type = 1;

        while (serverSocket.isConnected()) {
          bytes = 0;

          size = dataInputStream.readInt();
          data = new byte[size];

          for (int i = 0; i < size; i += bytes) {
            bytes = dataInputStream.read(data, i, size - i);
            if (bytes == -1) { // EOF before a full frame arrived: bail out instead of looping forever
              throw new java.io.EOFException();
            }
          }

          buff = new Mat(1, size, CvType.CV_8UC1);
          buff.put(0, 0, data);

          rev = Highgui.imdecode(buff, Highgui.CV_LOAD_IMAGE_UNCHANGED);

          Imgproc.cvtColor(rev, ret, Imgproc.COLOR_RGB2BGR);

          wrapper.img = ret;
          publishProgress(wrapper);
          Thread.sleep(75);
        }

      } catch (Exception e) {
        e.printStackTrace();
      }

      return null;
    }
  public void templateMatching() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    int match_method = Imgproc.TM_CCOEFF_NORMED; // == 5
    int max_Trackbar = 5;
    Mat data = Highgui.imread("images/training_data/1" + "/data (" + 1 + ").jpg");
    Mat temp = Highgui.imread("images/template.jpg");
    Mat img = data.clone();

    int result_cols = img.cols() - temp.cols() + 1;
    int result_rows = img.rows() - temp.rows() + 1;
    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    Imgproc.matchTemplate(img, temp, result, match_method);
    Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    Point matchLoc;
    // Java equivalent of the C++ call: minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, Mat());
    Core.MinMaxLocResult res = Core.minMaxLoc(result);

    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
      matchLoc = res.minLoc;
    } else {
      matchLoc = res.maxLoc;
    }

    // Draw a rectangle around the best match
    Core.rectangle(
        img,
        matchLoc,
        new Point(matchLoc.x + temp.cols(), matchLoc.y + temp.rows()),
        new Scalar(0, 255, 0));

    // Save the visualized detection.
    Highgui.imwrite("images/samp.jpg", img);
  }
Example #13
  /** Capture images and run color processing through here */
  public void capture() {
    VideoCapture camera = new VideoCapture();

    camera.set(12, -20); // property 12 = CV_CAP_PROP_SATURATION; might not be necessary

    // CaptureImage image = new CaptureImage();

    camera.open(0); // open the default camera (device 0)
    if (!camera.isOpened()) {
      System.out.println("Camera Error");

      // Determine whether to use System.exit(0) or return

    } else {
      System.out.println("Camera OK");
    }

    boolean success = camera.read(capturedFrame);
    if (success) {
      try {
        processWithContours(capturedFrame, processedFrame);
      } catch (Exception e) {
        System.out.println(e);
      }
      // image.processFrame(capturedFrame, processedFrame);
      // processedFrame should be CV_8UC3

      // image.findCaptured(processedFrame);

      // image.determineKings(capturedFrame);

      int bufferSize = processedFrame.channels() * processedFrame.cols() * processedFrame.rows();
      byte[] b = new byte[bufferSize];

      processedFrame.get(0, 0, b); // get all the pixels
      // This might need to be BufferedImage.TYPE_INT_ARGB
      img =
          new BufferedImage(
              processedFrame.cols(), processedFrame.rows(), BufferedImage.TYPE_INT_RGB);
      int width = (int) camera.get(Highgui.CV_CAP_PROP_FRAME_WIDTH);
      int height = (int) camera.get(Highgui.CV_CAP_PROP_FRAME_HEIGHT);
      // img.getRaster().setDataElements(0, 0, width, height, b);
      byte[] a = new byte[bufferSize];
      System.arraycopy(b, 0, a, 0, bufferSize);

      Highgui.imwrite("camera.jpg", processedFrame);
      System.out.println("Success");
    } else System.out.println("Unable to capture image");

    camera.release();
  }
 public void run() {
   System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
   MyCvWindow cvWindow = new MyCvWindow("sample", 512, 512);
   String filepath = getClass().getResource("lena.jpg").getPath();
   Mat image;
   while (true) {
     image = Highgui.imread(filepath);
     cvWindow.showImage(image);
     Point point = cvWindow.touchedPoint();
     if (point != null) {
       System.out.println("Point(" + point.x + "," + point.y + ")");
     }
     int key = cvWindow.waitKey(40);
     if (key == MyCvWindow.KEY_ESC) {
       System.exit(0);
     }
   }
 }
Example #15
  public static void main(String[] args) {

    if (args.length < 1) {
      System.out.println("Filename missing");
      System.exit(-1);
    }

    Mat m = Highgui.imread(args[0]);

    DetectSudoku sudoku = new DetectSudoku();
    List<Integer> l = sudoku.extractDigits(m);

    Board b = Board.of(9, Joiner.on(" ").join(l));

    System.out.println("Grabbed sudoku\n==============\n\n");
    System.out.println(b);

    SudokuSolver s = new SudokuSolver(b);
    Board solved = s.solve();

    System.out.println("Solved sudoku\n=============\n\n");
    System.out.println(solved);
  }
  public static void extractQueryFeatures2HDFS(String filename, Job job) throws IOException {

    // Read the local image.jpg as a Mat
    // (note: imread's second argument is a CV_LOAD_IMAGE_* flag, not a CvType)
    Mat query_mat_float =
        Highgui.imread(LOCAL_USER_DIR + ID + INPUT + "/" + filename, CvType.CV_32FC3);
    // Convert RGB to GRAY
    Mat query_gray = new Mat();
    Imgproc.cvtColor(query_mat_float, query_gray, Imgproc.COLOR_RGB2GRAY);
    // Convert the float type to unsigned integer (required by SIFT)
    Mat query_mat_byte = new Mat();
    query_gray.convertTo(query_mat_byte, CvType.CV_8UC3);
    //        // Resize the image to 1/FACTOR both width and height
    //        Mat query_mat_byte = FeatureExtraction.resize(query_mat_byte);

    // Extract the feature from the (Mat)image
    Mat query_features = FeatureExtraction.extractFeature(query_mat_byte);

    System.out.println(PREFIX + "Extracting the query image feature...");
    System.out.println("query_mat(float,color):" + query_mat_float);
    System.out.println("query_mat(float,gray):" + query_gray);
    System.out.println("query_mat(byte,gray):" + query_mat_byte);
    System.out.println("query_mat_features:" + query_features);
    System.out.println();

    // Store the feature to the hdfs in order to use it later in different map tasks
    System.out.println(PREFIX + "Generating the feature file for the query image in HDFS...");
    FileSystem fs = FileSystem.get(job.getConfiguration());
    String featureFileName = filename.substring(0, filename.lastIndexOf(".")) + ".json";
    FSDataOutputStream fsDataOutputStream =
        fs.create(new Path(HDFS_HOME + USER + ID + INPUT + "/" + featureFileName));
    BufferedWriter bw =
        new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
    bw.write(FeatureExtraction.mat2json(query_features));
    bw.close();
    System.out.println(PREFIX + "Query feature extraction finished...");
    System.out.println();
  }
  /* (non-Javadoc)
   * @see java.lang.Runnable#run()
   */
  @Override
  public void run() {
    if (MODE.equals("VIDEO")) {
      Mat capturedImage = new Mat();
      VideoCapture vc = new VideoCapture(DEVICE);
      if (!vc.isOpened()) {
        System.out.println("Capture Failed!");
        return;
      }
      System.out.println("Device " + DEVICE + " opened");
      // set captured resolution
      vc.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, 640);
      vc.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, 480);

      // Manually set exposure
      vc.set(15, -11);
      while (true) {
        vc.read(capturedImage);
        if (!capturedImage.empty()) { // read() never yields null; check for an empty frame instead
          // flip the image to compensate for camera orientation
          Core.flip(capturedImage, capturedImage, -1);
          capturedImage.copyTo(finalDisplayImg);
          parseImage(capturedImage);
        }
      }
    } else { // STILL IMAGE
      Mat capturedImage = Highgui.imread(IMAGE_FILEPATH);
      while (true) {
        if (needUpdate) {
          capturedImage.copyTo(finalDisplayImg);
          parseImage(capturedImage);
          needUpdate = false;
        }
      }
    }
  }
  /**
   * Analyze video frames using a computer vision approach and generate an ArrayList<AttitudeRec>.
   *
   * @param recs output ArrayList of AttitudeRec
   * @return total number of frames in the video
   */
  private int analyzeVideo(ArrayList<AttitudeRec> recs) {
    VideoMetaInfo meta = new VideoMetaInfo(new File(mPath, "videometa.json"));

    int decimation = 1;

    if (meta.fps > DECIMATION_FPS_TARGET) {
      decimation = (int) (meta.fps / DECIMATION_FPS_TARGET);
      meta.fps /= decimation;
    }

    VideoDecoderForOpenCV videoDecoder =
        new VideoDecoderForOpenCV(
            new File(mPath, "video.mp4"), decimation); // process 1 frame out of every 'decimation' frames

    Mat frame;
    Mat gray = new Mat();
    int i = -1;

    Size frameSize = videoDecoder.getSize();

    if (frameSize.width != meta.frameWidth || frameSize.height != meta.frameHeight) {
      // this is very unlikely
      return -1;
    }

    if (TRACE_VIDEO_ANALYSIS) {
      Debug.startMethodTracing("cvprocess");
    }

    Size patternSize = new Size(4, 11);

    float fc = (float) (meta.frameWidth / 2.0 / Math.tan(meta.fovWidth / 2.0));
    Mat camMat = cameraMatrix(fc, new Size(frameSize.width / 2, frameSize.height / 2));
    MatOfDouble coeff = new MatOfDouble(); // dummy

    MatOfPoint2f centers = new MatOfPoint2f();
    MatOfPoint3f grid = asymmetricalCircleGrid(patternSize);
    Mat rvec = new MatOfFloat();
    Mat tvec = new MatOfFloat();

    MatOfPoint2f reprojCenters = new MatOfPoint2f();

    if (LOCAL_LOGV) {
      Log.v(TAG, "Camera Mat = \n" + camMat.dump());
    }

    long startTime = System.nanoTime();

    while ((frame = videoDecoder.getFrame()) != null) {
      if (LOCAL_LOGV) {
        Log.v(TAG, "got a frame " + i);
      }

      // increment up front: later parts of this loop body may be skipped with continue
      i++;

      // convert to gray manually as by default findCirclesGridDefault uses COLOR_BGR2GRAY
      Imgproc.cvtColor(frame, gray, Imgproc.COLOR_RGB2GRAY);

      boolean foundPattern =
          Calib3d.findCirclesGridDefault(
              gray, patternSize, centers, Calib3d.CALIB_CB_ASYMMETRIC_GRID);

      if (!foundPattern) {
        // skip to next frame
        continue;
      }

      if (OUTPUT_DEBUG_IMAGE) {
        Calib3d.drawChessboardCorners(frame, patternSize, centers, true);
      }

      // figure out the extrinsic parameters using real ground truth 3D points and the pixel
      // position of blobs found in findCircleGrid, an estimated camera matrix and
      // no-distortion are assumed.
      boolean foundSolution =
          Calib3d.solvePnP(grid, centers, camMat, coeff, rvec, tvec, false, Calib3d.CV_ITERATIVE);

      if (!foundSolution) {
        // skip to next frame
        if (LOCAL_LOGV) {
          Log.v(TAG, "cannot find pnp solution in frame " + i + ", skipped.");
        }
        continue;
      }

      // reproject the points to evaluate the accuracy of the solvePnP result
      Calib3d.projectPoints(grid, rvec, tvec, camMat, coeff, reprojCenters);

      // error is evaluated in norm2, which is real error in pixel distance / sqrt(2)
      double error = Core.norm(centers, reprojCenters, Core.NORM_L2);

      if (LOCAL_LOGV) {
        Log.v(TAG, "Found attitude, re-projection error = " + error);
      }

      // if error is reasonable, add it into the results
      if (error < REPROJECTION_THREASHOLD) {
        double[] rv = new double[3];
        rvec.get(0, 0, rv);
        recs.add(new AttitudeRec((double) i / meta.fps, rodr2rpy(rv)));
      }

      if (OUTPUT_DEBUG_IMAGE) {
        Calib3d.drawChessboardCorners(frame, patternSize, reprojCenters, true);
        Highgui.imwrite(
            Environment.getExternalStorageDirectory().getPath()
                + "/RVCVRecData/DebugCV/img"
                + i
                + ".png",
            frame);
      }
    }

    if (LOCAL_LOGV) {
      Log.v(TAG, "Finished decoding");
    }

    if (TRACE_VIDEO_ANALYSIS) {
      Debug.stopMethodTracing();
    }

    if (LOCAL_LOGV) {
      // time analysis
      double totalTime = (System.nanoTime() - startTime) / 1e9;
      Log.i(TAG, "Total time: " + totalTime + "s, Per frame time: " + totalTime / i);
    }
    return i;
  }
Example #19
  /**
   * Determines which pieces are kings
   *
   * @param in Mat image of board
   */
  public void determineKings(Mat in) {
    int playSquares = 32;

    Mat dst = new Mat(in.rows(), in.cols(), in.type());
    in.copyTo(dst);

    Imgproc.cvtColor(dst, dst, Imgproc.COLOR_BGR2GRAY); // change to single color

    Mat canny = new Mat();
    Imgproc.Canny(dst, canny, 100, 200); // edge-detect; lower threshold values find more edges
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat(); // holds nested contour information
    Imgproc.findContours(
        canny,
        contours,
        hierarchy,
        Imgproc.RETR_LIST,
        Imgproc.CHAIN_APPROX_SIMPLE); // Imgproc.RETR_LIST, TREE

    // draw contour image
    Mat mask = Mat.zeros(dst.size(), dst.type());
    Imgproc.drawContours(
        mask, contours, -1, new Scalar(255, 255, 255), 1, 8, hierarchy, 2, new Point());
    Highgui.imwrite("contours.jpg", mask);

    ArrayList<Integer> occupied = new ArrayList<Integer>();
    for (int i = 0; i < playSquares; i++) {
      if (board[i] != 0) occupied.add(i);
    }

    for (int i = 0; i < contours.size(); i++) // assuming only contours are checker pieces
    {
      // determine if it should be a king
      // use Rect r = Imgproc.boundingRect then find height of it by r.height

      // Get bounding rect of contour
      Rect bound = Imgproc.boundingRect(contours.get(i));

      if (bound.height > in.rows() / 8) {
        // board[(int) occupied.get(0)]++; // make it a king
        // occupied.remove(0);
      }
    }

    // or apply to each region of interest

    /*
    // keep track of starting row square
    int parity = 0; // 0 is even, 1 is odd, tied to row number
    int count = 0; // row square
    int rowNum = 0; // row number, starting at 0

    int vsegment = in.rows() / 8; // only accounts 8 playable
    int hsegment = in.cols() / 12; // 8 playable, 2 capture, 2 extra
    int offset = hsegment * 2; // offset for playable board

    // For angle of camera
    int dx = 48;
    hsegment -= 8;


    // Go through all playable squares
    for (int i = 0; i < playSquares; i++)
    {
    	// change offset depending on the row
    	if (parity == 0) // playable squares start on immediate left
    		offset = hsegment * 3 + dx;
    	else // playable squares start on 2nd square from left
    		offset = hsegment * 2 + dx;

    	// find where roi should be
    	Point p1 = new Point(offset + count * hsegment, rowNum * vsegment); // top left point of rectangle (x,y)
    	Point p2 = new Point(offset + (count + 1) * hsegment, (rowNum + 1) * vsegment); // bottom right point of rectangle (x,y)

    	// create rectangle that is board square
    	Rect bound = new Rect(p1, p2);

    	// frame only includes rectangle
    	Mat roi = new Mat(in, bound);

           Imgproc.cvtColor(roi, roi, Imgproc.COLOR_BGR2GRAY); // change to single color

           Mat canny = new Mat();
           Imgproc.Canny(roi, canny, 2, 4); // make image a canny image that is only edges; 2,4
           // lower threshold values find more edges
           List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
           Mat hierarchy = new Mat(); // holds nested contour information
           Imgproc.findContours(canny, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); // Imgproc.RETR_LIST, TREE

           // Get bounding rect of contour
              Rect rect = Imgproc.boundingRect(contours.get(0));

              if (rect.height > in.rows() / 8)
    	{
    		board[i]++; // make it a king
    	}

    	count += 2;
    	if (count == 8)
    	{
    		parity = ++parity % 2; // change odd or even
    		count = 0;
    		rowNum++;
    		hsegment += 1;
    		dx -= 6;
    	}
    }*/
  }
 protected Mat ImageBGRFromString(byte[] data) {
   Mat frame = new Mat(1, data.length, CvType.CV_8UC1);
   frame.put(0, 0, data);
   return Highgui.imdecode(frame, Highgui.CV_LOAD_IMAGE_COLOR); // 1 = decode as 3-channel BGR
 }
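The helper above decodes an in-memory byte[] into a BGR Mat; the inverse direction uses Highgui.imencode. A minimal sketch (the method name and the ".jpg" extension are illustrative; it needs org.opencv.core.MatOfByte):

  protected byte[] stringFromImageBGR(Mat image) {
    MatOfByte buf = new MatOfByte();
    Highgui.imencode(".jpg", image, buf);
    return buf.toArray();
  }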
Example #21
 public static void loadSquareStuffBwahaha() {
   System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
   // System.out.println(input);
   Mat src = Highgui.imread(input);
   getSquare(src);
 }
  public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    //      Mat mat = Mat.eye( 3, 3, CvType.CV_8UC1 );
    //      System.out.println( "mat = " + mat.dump() );

    Sample n = new Sample();
    //   n.templateMatching();

    // put text in image
    //      Mat data= Highgui.imread("images/erosion.jpg");

    //      Core.putText(data, "Sample", new Point(50,80), Core.FONT_HERSHEY_SIMPLEX, 1, new
    // Scalar(0,0,0),2);
    //
    //      Highgui.imwrite("images/erosion2.jpg", data);

    // getting dct of an image
    String path = "images/croppedfeature/go (20).jpg";
    path = "images/wordseg/img1.png";
    Mat image = Highgui.imread(path, Highgui.IMREAD_GRAYSCALE);
    ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();

    Imgproc.threshold(image, image, 0, 255, Imgproc.THRESH_OTSU);
    Imgproc.threshold(image, image, 220, 128, Imgproc.THRESH_BINARY_INV);
    Mat newImg = new Mat(45, 100, image.type());

    newImg.setTo(new Scalar(0));
    n.copyMat(image, newImg);

    int vgap = 25;
    int hgap = 45 / 3;

    Moments m = Imgproc.moments(image, false);
    Mat hu = new Mat();
    Imgproc.HuMoments(m, hu);
    System.out.println(hu.dump());

    //      //divide the mat into 12 parts then get the features of each part
    //      int count=1;
    //      for(int j=0; j<45; j+=hgap){
    //    	  for(int i=0;i<100;i+=vgap){
    //    		  Mat result = newImg.submat(j, j+hgap, i, i+vgap);
    //
    //
    //    		  Moments m= Imgproc.moments(result, false);
    //    		  double m01= m.get_m01();
    //    		  double m00= m.get_m00();
    //    		  double m10 = m.get_m10();
    //    		  int x= m00!=0? (int)(m10/m00):0;
    //    		  int y= m00!=0? (int)(m01/m00):0;
    //    		  Mat hu= new Mat();
    //    		  Imgproc.HuMoments(m, hu);
    //    		  System.out.println(hu.dump());
    //    		  System.out.println(count+" :"+x+" and "+y);
    //    		  Imgproc.threshold(result, result, 0,254, Imgproc.THRESH_BINARY_INV);
    //    		  Highgui.imwrite("images/submat/"+count+".jpg", result);
    //    		  count++;
    //
    //    	  }
    //      }
    //
    //    for(int i=vgap;i<100;i+=vgap){
    //	  Point pt1= new Point(i, 0);
    //      Point pt2= new Point(i, 99);
    //      Core.line(newImg, pt1, pt2, new Scalar(0,0,0));
    //  }
    //  for(int i=hgap;i<45;i+=hgap){
    //	  Point pt1= new Point(0, i);
    //      Point pt2= new Point(99, i);
    //      Core.line(newImg, pt1, pt2, new Scalar(0,0,0));
    //  }
    //      Highgui.imwrite("images/submat/copyto.jpg", newImg);
  }
  public static void main(String[] args) {

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    /** *** Configuration Variables **** */
    int imgWidth = 200;
    int imgHeight = 200;
    int numPatch = 2000;
    int patchWidth = 40;
    int patchHeight = 40;
    int k = 200; // kmeans number of center
    int numBins = 8;

    String filePathRed = "base/Red/";
    String filePathBlack = "base/Black";
    String procPathRed = "base/ProcRed";
    String procPathBlack = "base/ProcBlack";
    /** ******************************** */
    ArrayList<String> fileNames = new ArrayList<String>();

    sources = new ArrayList<Mat>();

    /* Image IO */
    try {

      /* Read Red Staplers */
      File folder = new File(filePathRed);
      BufferedImage currentImage;
      for (final File fileEntry : folder.listFiles()) {
        if (!fileEntry.isDirectory()) {

          // Resize Image
          currentImage = ImageProc.resize(ImageIO.read(fileEntry), imgWidth, imgHeight);

          File outFile = new File(procPathRed + "/" + fileEntry.getName());
          ImageIO.write(currentImage, "JPG", outFile);
          sources.add(Highgui.imread(outFile.getPath()));
          fileNames.add(outFile.getName());
        }
      }

      /* Read Black Staplers */
      folder = new File(filePathBlack);
      for (final File fileEntry : folder.listFiles()) {
        if (!fileEntry.isDirectory()) {

          // Resize Image
          currentImage = ImageProc.resize(ImageIO.read(fileEntry), imgWidth, imgHeight);

          File outFile = new File(procPathBlack + "/" + fileEntry.getName());
          ImageIO.write(currentImage, "JPG", outFile);
          sources.add(Highgui.imread(outFile.getPath()));
          fileNames.add(outFile.getName());
        }
      }

    } catch (IOException e) {
      e.printStackTrace();
    }

    /** ************************************* */
    float[] p1 = new float[30];
    float[] p2 = new float[30];

    /* Create Image Patches and calculate color feature vector for each patch */
    Iterator<Mat> imgIter = sources.iterator();
    Mat thisImage;
    Mat featureMat = new Mat();
    List<Mat> imagePatches = null;
    Iterator<Mat> patchIter = null;

    while (imgIter.hasNext()) {

      thisImage = imgIter.next();

      // Randomly Sample Patches
      imagePatches = ImageProc.sampleImage(thisImage, patchWidth, patchHeight, numPatch);
      patchIter = imagePatches.iterator();

      // Create color feature vector for each patch
      while (patchIter.hasNext()) {
        featureMat.push_back(ImageProc.calBGRFeature(patchIter.next(), numBins));
      }
    }

    Mat centers = new Mat();
    Mat bestLabels = new Mat();
    Core.kmeans(
        featureMat,
        k,
        bestLabels,
        new TermCriteria(TermCriteria.EPS, 0, Math.pow(10, -5)),
        0,
        Core.KMEANS_RANDOM_CENTERS,
        centers);

    MatOfFloat bestLabelRange = new MatOfFloat(0, k);

    ArrayList<Mat> centerHist = new ArrayList<Mat>();
    Mat centerHistMat = new Mat(0, k, CvType.CV_32FC1);

    imgIter = sources.listIterator();
    Iterator<String> nameIter = fileNames.iterator();

    int ptr = 0;
    int cnt = 0;

    // Output CSV

    try {
      File outCSV = new File("output/res.csv");
      FileWriter fstream = new FileWriter(outCSV);
      BufferedWriter out = new BufferedWriter(fstream);
      StringBuilder sb;
      out.write("@relation staplers\n");
      for (int n = 0; n < 200; n++) {
        out.write("@attribute " + "a" + n + " real\n");
      }

      out.write("@attribute class {RedStapler, BlackStapler}\n\n");
      out.write("@data\n\n");

      while (imgIter.hasNext()) {

        Mat thisMat = new Mat(bestLabels, new Range(ptr, ptr + numPatch), new Range(0, 1));
        Mat mat = new Mat();
        thisMat.convertTo(mat, CvType.CV_32F);

        ArrayList<Mat> bestLabelList = new ArrayList<Mat>();
        bestLabelList.add(mat);

        Mat thisHist = new Mat();
        Imgproc.calcHist(
            bestLabelList, new MatOfInt(0), new Mat(), thisHist, new MatOfInt(k), bestLabelRange);

        centerHist.add(thisHist);

        // Create file
        sb = new StringBuilder();

        float[] histArr = new float[(int) thisHist.total()];
        thisHist.get(0, 0, histArr);

        for (int m = 0; m < histArr.length; m++) {
          sb.append(histArr[m] + ",");
        }

        if (cnt++ < 10) sb.append("RedStapler");
        else sb.append("BlackStapler");

        sb.append("\n");
        out.write(sb.toString());
        // Close the output stream

        centerHistMat.push_back(thisHist.t());
        ptr += numPatch;
        imgIter.next();
      }

      out.close();
    } catch (IOException e) { // Catch exception if any
      System.err.println("Error: " + e.getMessage());
      System.exit(-1);
    }

    /* Support Vector Machine Validation */
    Mat labelMat = new Mat(sources.size(), 1, CvType.CV_32FC1);

    double[] labels = new double[20];
    for (int i = 0; i < 10; i++) {
      labels[i] = 1;
      labels[i + 10] = -1;
    }
    labelMat.put(0, 0, labels);

    CvSVMParams params = new CvSVMParams();
    params.set_kernel_type(CvSVM.LINEAR);

    CvSVM svm = new CvSVM();
    svm.train(centerHistMat, labelMat, new Mat(), new Mat(), params);
    svm.save("base/haha.txt");
    String basePath = "base/predict/";

    try {
      File testCSV = new File("output/test.arff");
      FileWriter testStream = new FileWriter(testCSV);
      BufferedWriter testOut = new BufferedWriter(testStream);

      testOut.write("@relation staplers\n");
      for (int n = 0; n < 200; n++) {
        testOut.write("@attribute " + "a" + n + " real\n");
      }

      testOut.write("@attribute class {RedStapler, BlackStapler}\n\n");
      testOut.write("@data\n\n");

      for (int m = 0; m < 21; m++) {

        // System.out.println(basePath + m + ".jpg");
        Mat testImg = Highgui.imread(basePath + m + ".jpg");

        List<Mat> patches = ImageProc.sampleImage(testImg, patchWidth, patchHeight, numPatch);
        List<Mat> features = new ArrayList<Mat>();

        for (int i = 0; i < patches.size(); i++) {

          Mat testVector = ImageProc.calBGRFeature(patches.get(i), numBins);
          features.add(testVector);
        }

        Mat testData = ImageProc.calFeatureVector(features, centers);

        StringBuilder testsb = new StringBuilder();
        // String name = nameIter.next();
        // sb.append(name + ",");

        float[] data = new float[testData.cols()];
        testData.get(0, 0, data);

        for (int o = 0; o < data.length; o++) {
          testsb.append(data[o] + ",");
        }
        if (m < 6) testsb.append("RedStapler");
        else testsb.append("BlackStapler");

        testsb.append("\n");
        testOut.write(testsb.toString());

        System.out.println("Img" + m + " " + svm.predict(testData));
      }
    } catch (IOException e) {
      e.printStackTrace();
      System.exit(-1);
    }
  }
    @Override
    public void run() {
      // Initialization
      cptRect = 0;
      initialiseRectangle();

      if (video.isOpened()) {
        while (begin) {
          // Grab the image from the video capture
          video.retrieve(frameaux);
          // Resize the frame
          Imgproc.resize(frameaux, frame, frame.size());
          // Make a copy
          frame.copyTo(currentFrame);

          if (jCheckBoxMotionDetection.isSelected()) {
            if (firstFrame) {
              frame.copyTo(lastFrame);
              firstFrame = false;
              continue;
            }

            // Subtract the previous frame from the current one
            Core.subtract(currentFrame, lastFrame, processedFrame);

            // Convert to grayscale
            Imgproc.cvtColor(processedFrame, processedFrame, Imgproc.COLOR_RGB2GRAY);

            // Apply a threshold using the value read from the JSlider
            int threshold = jSliderThreshold.getValue();
            Imgproc.threshold(
                processedFrame, processedFrame, threshold, 255, Imgproc.THRESH_BINARY);

            // Detect the contours and check them
            detection_contours(currentFrame, processedFrame);
          }
          // Draw the authentication rectangles
          drawRectangle();

          currentFrame.copyTo(processedFrame);

          // Encode the frame as a MatOfByte
          Highgui.imencode(".jpg", processedFrame, matOfByte);
          byte[] byteArray = matOfByte.toArray();

          // Display the image
          try {
            in = new ByteArrayInputStream(byteArray);
            bufImage = ImageIO.read(in);
            image.updateImage(bufImage);
          } catch (Exception ex) {
            ex.printStackTrace();
          }

          try {
            Thread.sleep(50);
          } catch (Exception ex) {
            ex.printStackTrace();
          }
        }
      }
    }
Example #25
  public static void getSquare(Mat imgSource) {
    Mat sourceImage = imgSource.clone();
    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY);
    // detect edges with Canny (8-bit output)
    Imgproc.Canny(imgSource, imgSource, 50, 50);

    // apply a Gaussian blur to smooth the edge map
    Imgproc.GaussianBlur(imgSource, imgSource, new org.opencv.core.Size(5, 5), 5);

    // find the contours
    List<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Imgproc.findContours(
        imgSource, contours, new Mat(), Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE);

    double maxArea = -1;
    int maxAreaIdx = -1;
    // Log.d("size",Integer.toString(contours.size()));
    MatOfPoint temp_contour = contours.get(0); // start from the first contour
    MatOfPoint2f approxCurve = new MatOfPoint2f();
    MatOfPoint largest_contour = contours.get(0);
    List<MatOfPoint> largest_contours = new ArrayList<MatOfPoint>();
    // Imgproc.drawContours(imgSource,contours, -1, new Scalar(0, 255, 0),
    // 1);

    for (int idx = 0; idx < contours.size(); idx++) {
      temp_contour = contours.get(idx);
      double contourarea = Imgproc.contourArea(temp_contour);
      // compare this contour to the previous largest contour found
      if (contourarea > maxArea) {
        // check if this contour is a square
        MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray());
        int contourSize = (int) temp_contour.total();
        MatOfPoint2f approxCurve_temp = new MatOfPoint2f();
        Imgproc.approxPolyDP(new_mat, approxCurve_temp, contourSize * 0.05, true);
        if (approxCurve_temp.total() == 4) {
          maxArea = contourarea;
          maxAreaIdx = idx;
          approxCurve = approxCurve_temp;
          largest_contour = temp_contour;
        }
      }
    }

    Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BayerBG2RGB);

    double[] temp_double;
    temp_double = approxCurve.get(0, 0);
    Point p1 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p1,55,new Scalar(0,0,255));
    // Imgproc.warpAffine(sourceImage, dummy, rotImage,sourceImage.size());
    temp_double = approxCurve.get(1, 0);
    Point p2 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p2,150,new Scalar(255,255,255));
    temp_double = approxCurve.get(2, 0);
    Point p3 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p3,200,new Scalar(255,0,0));
    temp_double = approxCurve.get(3, 0);
    Point p4 = new Point(temp_double[0], temp_double[1]);
    // Core.circle(imgSource,p4,100,new Scalar(0,0,255));
    List<Point> source = getCorners(p1, p2, p3, p4);
    for (Point p : source) {
      // System.out.println(p);
    }
    Mat startM = Converters.vector_Point2f_to_Mat(source);
    // Imgproc.cvtColor(sourceImage, sourceImage, Imgproc.COLOR_BGR2GRAY);
    Mat result = warp(sourceImage, startM, 5);
    // result = warp(result,result,1);
    // Imgproc.cvtColor(result, result, Imgproc.COLOR_BGR2GRAY);
    Highgui.imwrite(output, result);
    // System.out.println("Done");
    // return result;
  }
Example #26
  public void run() {
    System.out.println("\nRunning DetectFaceDemo");

    // Create a face detector from the cascade file in the resources
    // directory.
    // String facefilterpath =
    // getClass().getResource("../resources/haarcascade_mcs_eyepair_big.xml").getPath();
    String facefilterpath = getClass().getResource("../resources/haarcascade_eye.xml").getPath();
    facefilterpath = facefilterpath.substring(1, facefilterpath.length());
    CascadeClassifier faceDetector = new CascadeClassifier(facefilterpath);
    String pngpath = getClass().getResource("../resources/brown_eyes.jpg").getPath();
    pngpath = pngpath.substring(1, pngpath.length());
    Mat image = Highgui.imread(pngpath);

    // Detect faces in the image.
    // MatOfRect is a special container class for Rect.
    MatOfRect faceDetections = new MatOfRect();
    faceDetector.detectMultiScale(image, faceDetections);

    Mat image2 = image;

    Imgproc.cvtColor(image2, image, Imgproc.COLOR_BGR2GRAY); // 6 == COLOR_BGR2GRAY
    Imgproc.GaussianBlur(image, image, new Size(7, 7), 4, 4);
    // Imgproc.medianBlur(image,image, 2);
    MatOfPoint3f circles = new MatOfPoint3f();
    MatOfPoint3f circles2 = new MatOfPoint3f();

    Imgproc.HoughCircles(
        image, circles, Imgproc.CV_HOUGH_GRADIENT, 5, image.rows() / 5, 100, 100, 10, 50);

    Imgproc.HoughCircles(
        image, circles2, Imgproc.CV_HOUGH_GRADIENT, 5, image.rows() / 5, 100, 100, 50, 400);

    Imgproc.cvtColor(image, image, Imgproc.COLOR_GRAY2BGR); // 8 == COLOR_GRAY2BGR

    System.out.println(String.format("Detected %s faces", faceDetections.toArray().length));
    // Draw a bounding box around each face.
    for (Rect rect : faceDetections.toArray()) {
      // Core.rectangle(image, new Point(rect.x, rect.y), new Point(rect.x + rect.width, rect.y +
      // rect.height), new Scalar(0, 255, 0),100);
    }

    System.out.println(String.format("Detected %s circles", circles.total()));

    for (Point3 circle : circles.toArray()) {
      Point center = new Point(circle.x, circle.y);
      int radius = (int) Math.round(circle.z);
      Core.circle(image, center, 3, new Scalar(0, 255, 0), -1, 8, 0);
      Core.circle(image, center, radius, new Scalar(0, 0, 255), 3, 8, 0);
      // Core.circle(image, center, radius, new Scalar(0,255,0), 10,8, 0);
    }
    for (Point3 circle : circles2.toArray()) {
      Point center = new Point(circle.x, circle.y);
      int radius = (int) Math.round(circle.z);
      Core.circle(image, center, 3, new Scalar(0, 255, 0), -1, 8, 0);
      Core.circle(image, center, radius, new Scalar(0, 0, 255), 3, 8, 0);
      // Core.circle(image, center, radius, new Scalar(0,255,0), 10,8, 0);
    }

    // Core.circle(image, new Point(100,100), 10, new Scalar(0,255,0), 10, 8, 0);
    // Save the visualized detection.

    String filename = "faceDetection.png";
    System.out.println(String.format("Writing %s", filename));
    Highgui.imwrite(filename, image);
  }
Example #27
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    img = inputFrame.rgba();
    endTime = System.currentTimeMillis();

    switch (viewMode) {
      case VIEW_MODE_RGBA:
      /** Detects the circles in the RGB format */
        frameCount++;
        drawCircles = false;
        /** Setting thrust */
        twist.dx(thrust);
        /** Flip image when using on boat */
        // Core.flip(img, img, 0);
        /** Convert to HSV, keep only the target color range, and blur to remove false circles */
        Imgproc.cvtColor(img, img_hue, Imgproc.COLOR_RGB2HSV);
        img_hue = InRangeCircles(img_hue);
        Imgproc.GaussianBlur(img_hue, img_hue, new Size(9, 9), 10, 10);

        /** Create mat for circles and apply the Hough Transform to find the circles */
        Mat circles = new Mat();
        Imgproc.HoughCircles(
            img_hue,
            circles,
            Imgproc.CV_HOUGH_GRADIENT,
            2,
            minDistance,
            70,
            20,
            minRadius,
            maxRadius);

        /** Draws the circles and angle */
        drawCircles(img, circles);
        break;

      case VIEW_MODE_BW:
      /** This mode displays image in black/white to show what the algorithm sees */
        Imgproc.cvtColor(img, img_hue, Imgproc.COLOR_RGB2HSV);
        img_hue = InRangeCircles(img_hue);
        Imgproc.GaussianBlur(img_hue, img, new Size(9, 9), 10, 10);
        break;

      case VIEW_MODE_PIC:
      /** Takes pictures every 20 frames */
        frameCount++;
        // Needed for saving raw photos normally
        Imgproc.cvtColor(img, img_hue, Imgproc.COLOR_RGB2BGR);
        Core.flip(img_hue, img_hue, 0);
        if (frameCount % 10 == 0) {
          // Imgproc.cvtColor(img, img_hue, Imgproc.COLOR_RGBA2BGR);
          Highgui.imwrite("/sdcard/TestPics/test" + frameCount / 10 + ".jpg", img_hue);
        }
        break;

      case VIEW_MODE_TEST:
      /** Testing mode for new code using the pictures as a simulation */
        frameCount++;
        fileNum++;
        if (fileNum > 175) {
          fileNum = 1;
        }
        Mat temp = Highgui.imread("/sdcard/TestPics/test" + fileNum + ".jpg");
        // Mat temp = Highgui.imread("/sdcard/TestPics/test17.jpg"); //120
        Imgproc.cvtColor(temp, img_hue, Imgproc.COLOR_BGR2HSV);
        // Imgproc.cvtColor(temp, temp, Imgproc.COLOR_BGR2RGB);
        img_hue = InRangeCircles(img_hue);
        Imgproc.GaussianBlur(img_hue, img_hue, new Size(9, 9), 10, 10);
        /** Create mat for circles and apply the Hough Transform to find the circles */
        Mat circles2 = new Mat();
        Imgproc.HoughCircles(
            img_hue,
            circles2,
            Imgproc.CV_HOUGH_GRADIENT,
            2,
            minDistance,
            70,
            20,
            minRadius,
            maxRadius);
        /** Draws the circles and angle */
        drawCircles(temp, circles2);
        // Highgui.imwrite("/sdcard/TestPics/test"+(fileNum+2)+".jpg", temp);
        // drawCircles(img_hue,circles2);
        startTime = System.currentTimeMillis();
        return temp;

      default:
        break;
    }

    startTime = System.currentTimeMillis();
    return img;
  }
  public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    CameraParameters camParam;
    camParam = new CameraParameters();
    MarkerDetector mDetector;
    mDetector = new MarkerDetector();
    ArrayList<Marker> markers;
    markers = new ArrayList<>();
    BoardDetector bd;
    bd = new BoardDetector(true);
    Board board;
    board = new Board();
    BoardConfiguration boardC;
    boardC = new BoardConfiguration();
    FiducidalMarkers fm = new FiducidalMarkers();

    Mat img = Highgui.imread("/home/jayzeegp/NetBeansProjects/ProbandoAruco/img/image-test.png");

    mDetector.detect(img, markers, camParam, -1);
    ArrayList<Integer> excludedIds = new ArrayList<>();
    excludedIds.add(23);
    excludedIds.add(12);
    excludedIds.add(45);
    excludedIds.add(11);
    excludedIds.add(44);
    excludedIds.add(22);

    ArrayList<Point> puntos = new ArrayList<>();
    puntos.add(0, new Point(1.04, 4.50));
    puntos.add(1, new Point(1.05, 4.51));
    puntos.add(2, new Point(1.06, 4.52));
    puntos.add(3, new Point(1.07, 4.53));

    try {
      mDetector.warp(img, img, new Size(4, 4), puntos);
    } catch (Exception ex) {
      System.out.println(ex.getMessage());
    }
    /*
     try{
         //bd.detect(markers, boardC, board, camParam);
         //Mat returned = fm.createBoardImage(new Size(2,3), 50, 5, boardC, excludedIds);
         //Mat returned = fm.createBoardImage_ChessBoard(new Size(2,3), 60, boardC, false, excludedIds);
         Mat returned = fm.createBoardImage_Frame(new Size(2,3), 50, 5, boardC, false, excludedIds);
         MatOfByte matOfByte = new MatOfByte();

        Highgui.imencode(".jpg", returned, matOfByte);

        byte[] byteArray = matOfByte.toArray();
        BufferedImage bufImage = null;
        boardC.saveToFile("/home/jayzeegp/NetBeansProjects/ProbandoAruco/board.txt");
        try {
            InputStream in = new ByteArrayInputStream(byteArray);
            bufImage = ImageIO.read(in);
        } catch (Exception e) {
        }
       JLabel picLabel = new JLabel(new ImageIcon(bufImage));
       JFrame frame = new JFrame("Original");
       frame.add(picLabel);
       frame.pack();
       frame.setVisible(true);

     }catch(Exception e){
         System.out.println("Exception: " + e.getMessage());
     }



     for(int i=0;i<markers.size();i++){
         if(markers.get(i).isValid()){
            markers.get(i).draw(img, new Scalar(255,0,0));
         }
     }


     MatOfByte matOfByte = new MatOfByte();

     Highgui.imencode(".jpg", img, matOfByte);

     byte[] byteArray = matOfByte.toArray();
     BufferedImage bufImage = null;

     try {
         InputStream in = new ByteArrayInputStream(byteArray);
         bufImage = ImageIO.read(in);
     } catch (Exception e) {
                  System.out.println("Exception: " + e.getMessage());
     }
    JLabel picLabel = new JLabel(new ImageIcon(bufImage));
    JFrame frame = new JFrame("Original");
    frame.add(picLabel);
    frame.pack();
    frame.setVisible(true);
    Mat resultado;
    */

  }
  public void btn_camera_ok(View view) {
    Log.i(TAG, "btn_camera_ok");
    if (!bPictaken) {
      ToastUtil.showShortToast(getApplicationContext(), "亲要先进行拍照哟!"); // "Please take a photo first!"
      return; // do not forget!
    }
    // Doing this asynchronously caused a runtime exception ("method called after release()"):
    // the activity may be released before the background thread returns.
    final Mat image = Highgui.imread(filePath);
    int width = image.width();
    int height = image.height();
    if (width > height) { // portrait images need to be rotated
      Core.flip(image.t(), image, 0); // rotate 90 degrees counter-clockwise
    }
    Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2GRAY); // gray
    Imgproc.resize(image, image, new Size(CommonUtil.IMAGE_WIDTH, CommonUtil.IMAGE_HEIGHT));
    int total = 0;
    String stotal = CommonUtil.userProps.getProperty("total");
    if (null != stotal) { // have some users!
      total = Integer.parseInt(stotal);
    }
    if (userid <= 0) { // this user does not exist yet
      userid = total + 1;
      try { // save new data!
        CommonUtil.userProps.setProperty("total", String.valueOf(userid));
        CommonUtil.userProps.setProperty(String.valueOf(userid), name);
        CommonUtil.saveUserProperties(CommonUtil.userProps);
      } catch (Exception e) {
        e.printStackTrace();
      }
      // create a folder for this user
      File userfolder =
          new File(
              CommonUtil.USERFOLDER.getAbsolutePath() + File.separator + String.valueOf(userid));
      if (!userfolder.exists()) {
        userfolder.mkdir();
      }
    }
    filePath =
        CommonUtil.USERFOLDER.getAbsolutePath()
            + File.separator
            + String.valueOf(userid)
            + File.separator
            + System.currentTimeMillis()
            + ".jpg"; // folder (user / userid)
    Highgui.imwrite(filePath, image);
    // save data to facedata.txt
    String data = filePath + ";" + userid + "\n"; // user image file path;user id
    try {
      RandomAccessFile facedataFile =
          new RandomAccessFile(
              CommonUtil.SDFOLDER + File.separator + CommonUtil.FACEDATA_FILENAME, "rw");
      facedataFile.seek(facedataFile.length());
      facedataFile.write(data.getBytes());
      facedataFile.close();
    } catch (FileNotFoundException e) {
      e.printStackTrace();
    } catch (IOException e) {
      e.printStackTrace();
    }
    Log.i(TAG, "image process ok");

    // add this pic to the model data
    new AsyncTask<Void, Void, Boolean>() {

      @Override
      protected Boolean doInBackground(Void... params) {
        xface.addImage(image, userid); // TODO: determine whether adding the image actually succeeded
        return true;
      }

      @Override
      protected void onPostExecute(Boolean result) {
        if (result) {
          ToastUtil.showShortToast(getApplicationContext(), "照片保存成功,模型建立好咯!"); // "Photo saved; model built!"
        } else {
          ToastUtil.showShortToast(getApplicationContext(), "照片保存成功,模型建立失败啦!"); // "Photo saved, but building the model failed!"
        }
        btn_camera_ok.setEnabled(true);
      }

      @Override
      protected void onPreExecute() {
        ToastUtil.showShortToast(getApplicationContext(), "照片保存中..."); // "Saving photo..."
        btn_camera_ok.setEnabled(false); // don't let the user save two images at the same time
      }
    }.execute();
  }
  /**
   * Called for every new frame captured by the camera.
   *
   * <p>Its behaviour differs depending on what we are trying to do:
   *
   * <ul>
   *   <li>If the door is not yet stable, we try to detect the "door stable" event so that a photo
   *       can be taken.
   *   <li>If the door is stable but not closed, a photo of the fridge contents has already been
   *       taken and we wait for the door to be closed before returning to the initial state.
   * </ul>
   *
   * @param inputFrame Image captured by the camera
   */
  public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
    Mat current = inputFrame.rgba();
    if (stable && !fermee) {
      // A photo has already been taken
      // We now look for the event: the video stream shows black images
      Scalar scalaireN = new Scalar(0x00, 0x00, 0x00, 0xFF);
      Mat noir = new Mat(current.size(), current.type(), scalaireN);
      // noir is an all-black matrix
      // Compare the current image against a black image; the result goes into diffNoir
      Mat diffNoir = new Mat(current.size(), current.type());
      Core.absdiff(current, noir, diffNoir);
      Double normeDiffNoir = new Double(Core.norm(diffNoir)); // Compute the norm of this matrix
      n.add(normeDiffNoir); // Add this norm to a container
      compteur++; // Count the number of frames processed
      if (compteur > 11) {
        // If enough frames have been taken, check whether the door is closed
        fermee = true;
        int i = 0;
        while (fermee && i < 10) {
          // The door is closed if, over the last ten frames, the difference between
          // a black image and the current image is never too large
          if (n.get(compteur - 1 - i) > 4500) {
            fermee = false; // if the difference is too large, the door is considered not closed
          }
          i++;
        } // if it was never too large, the door really is closed
        if (fermee) {
          // Reset the counter so it can be reused for a new photo; same for the list n
          compteur = 0;
          n.clear();
          finish(); // Return to the main activity, which waits for the fridge to be opened
        }
      }
    } else if (!stable) {
      // No photo has been taken yet
      // We look for the event: the image is stable
      if (buffer == null) {
        // First frame received: create a buffer matrix that will hold the previous image
        buffer = new Mat(current.size(), current.type());
        buffer = current.clone();
      } else { // At least the second frame has been received
        // Compare the previous image with the current one; the result goes into diffBuffer
        Mat diffBuffer = new Mat(current.size(), current.type());
        Core.absdiff(current, buffer, diffBuffer);
        Double normeDiffBuffer =
            new Double(Core.norm(diffBuffer)); // Compute the norm of this matrix
        n.add(normeDiffBuffer); // Add this norm to a container
        compteur++; // Count the number of frames processed
        if (compteur > 11) {
          // If enough frames have been taken, check whether the image is stable
          stable = true;
          int i = 0;
          while (stable && i < 10) {
            // The image is stable if, over the last ten frames, the difference between
            // the current image and the stored one is never too large
            if (n.get(compteur - 1 - i) > 4500) {
              stable = false;
            }
            i++;
          }
          if (stable) {
            Log.i(TAG, "Prise de la photo");
            // If the image is stable, first check that the door is not closed
            // (the same processing as for closed-door detection is applied here)
            Scalar scalaireN = new Scalar(0x00, 0x00, 0x00, 0xFF);
            Mat noir = new Mat(current.size(), current.type(), scalaireN);
            Mat diffNoir = new Mat(current.size(), current.type());
            Core.absdiff(current, noir, diffNoir);
            Double normeDiffNoir = new Double(Core.norm(diffNoir));
            if (normeDiffNoir > 4500) {
              // If the door is not closed, save the image before sending it
              File pictureFileDir = getDir();
              SimpleDateFormat dateFormat = new SimpleDateFormat("dd-MM-yyyy-HH.mm.ss");
              String date = dateFormat.format(new Date());
              String photoFile = "PictureCV_" + date + ".jpg"; // file name
              String filename = pictureFileDir.getPath() + File.separator + photoFile;
              // The colors must be converted before saving the image;
              // the documentation of cvtColor explains why
              Imgproc.cvtColor(current, current, Imgproc.COLOR_BGR2RGB);
              Highgui.imwrite(filename, current); // save
              Log.i(TAG, "Photo sauvegardée");
              // Reset the counter so it can be reused for a new photo; same for the list n
              compteur = 0;
              n.clear();

              /*
              // Attempt at image recognition:
              // try to detect the presence of a banana in every new image
              // captured by the phone
              Mat Grey = inputFrame.gray(); // image taken by the camera
              MatOfRect bananas = new MatOfRect();
              Size minSize = new Size(30,20);
              Size maxSize = new Size(150,100);
              Log.i(TAG, "Tentative de détection de banane");
              mCascadeClassifier.detectMultiScale(Grey, bananas, 1.1, 0, 10,minSize,maxSize);
              if (bananas.rows()>0){
                Log.i(TAG, "Nombre de bananes détectées : " + bananas.rows());
              }
              envoiPhoto(filename, bananas.rows()); // send the photo with the recognition data
              // End of image recognition
              */

              envoiPhoto(filename); // send the photo without recognition data

            } else {
              // Case where the door is closed
              // Reset the counter so it can be reused for a new photo; same for the list n
              compteur = 0;
              n.clear();
              finish();
            }
          }
        }
        buffer = current.clone();
      }
    }
    return inputFrame.rgba();
  }