public void templateMatching() {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
    // TM_CCOEFF_NORMED (= 5); see the other Imgproc.TM_* constants for alternatives
    int match_method = Imgproc.TM_CCOEFF_NORMED;
    Mat data = Highgui.imread("images/training_data/1/data (1).jpg");
    Mat temp = Highgui.imread("images/template.jpg");
    Mat img = data.clone();

    int result_cols = img.cols() - temp.cols() + 1;
    int result_rows = img.rows() - temp.rows() + 1;
    Mat result = new Mat(result_rows, result_cols, CvType.CV_32FC1);

    Imgproc.matchTemplate(img, temp, result, match_method);
    Core.normalize(result, result, 0, 1, Core.NORM_MINMAX, -1, new Mat());

    // Localize the best match with minMaxLoc
    Point matchLoc;
    Core.MinMaxLocResult res = Core.minMaxLoc(result);

    if (match_method == Imgproc.TM_SQDIFF || match_method == Imgproc.TM_SQDIFF_NORMED) {
      matchLoc = res.minLoc;
    } else {
      matchLoc = res.maxLoc;
    }

    // Draw a green rectangle around the best match
    Core.rectangle(
        img,
        matchLoc,
        new Point(matchLoc.x + temp.cols(), matchLoc.y + temp.rows()),
        new Scalar(0, 255, 0));

    // Save the visualized detection.
    Highgui.imwrite("images/samp.jpg", img);
  }
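
  // Hedged sketch (not part of the original pipeline): run the same matching once per
  // OpenCV matching mode, to compare how the TM_SQDIFF* family (minimum = best match)
  // behaves against TM_CCORR*/TM_CCOEFF* (maximum = best match). Output names are made up.
  public void templateMatchingAllModes(Mat img, Mat temp) {
    int[] modes = {
      Imgproc.TM_SQDIFF, Imgproc.TM_SQDIFF_NORMED,
      Imgproc.TM_CCORR, Imgproc.TM_CCORR_NORMED,
      Imgproc.TM_CCOEFF, Imgproc.TM_CCOEFF_NORMED
    };
    for (int mode : modes) {
      Mat result =
          new Mat(img.rows() - temp.rows() + 1, img.cols() - temp.cols() + 1, CvType.CV_32FC1);
      Imgproc.matchTemplate(img, temp, result, mode);
      Core.MinMaxLocResult res = Core.minMaxLoc(result);
      // The squared-difference modes score the best match lowest; the others highest
      Point matchLoc =
          (mode == Imgproc.TM_SQDIFF || mode == Imgproc.TM_SQDIFF_NORMED) ? res.minLoc : res.maxLoc;
      Mat vis = img.clone();
      Core.rectangle(
          vis,
          matchLoc,
          new Point(matchLoc.x + temp.cols(), matchLoc.y + temp.rows()),
          new Scalar(0, 255, 0));
      Highgui.imwrite("images/samp_mode" + mode + ".jpg", vis);
    }
  }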
  // Transform a JSON-encoded feature back into an OpenCV Mat
  public static Mat json2mat(String json) {

    JsonParser parser = new JsonParser();
    JsonElement parseTree = parser.parse(json);

    // Verify the input is a JSON object; give up on a malformed record instead of
    // killing the whole task with System.exit
    if (!parseTree.isJsonObject()) {
      System.out.println("The input is not a JSON object...");
      return null;
    }
    // Reuse the parse tree instead of parsing the string a second time
    JsonObject jobj = parseTree.getAsJsonObject();

    // Detect broken/null features: all four fields must be present
    if (jobj.get("rows") == null
        || jobj.get("cols") == null
        || jobj.get("type") == null
        || jobj.get("data") == null) {
      return null;
    }

    int rows = jobj.get("rows").getAsInt();
    int cols = jobj.get("cols").getAsInt();
    int type = jobj.get("type").getAsInt();
    String data = jobj.get("data").getAsString();
    // "data" is a comma-separated list of "row col value" triples
    String[] pixs = data.split(",");

    // Start from zeros so any element missing from "data" stays defined
    Mat descriptor = Mat.zeros(rows, cols, type);
    for (String pix : pixs) {
      String[] tmp = pix.split(" ");
      int r_pos = Integer.valueOf(tmp[0]);
      int c_pos = Integer.valueOf(tmp[1]);
      double rgb = Double.valueOf(tmp[2]);
      descriptor.put(r_pos, c_pos, rgb);
    }
    return descriptor;
  }
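
  // The JSON layout consumed by json2mat looks like
  //   {"rows":R,"cols":C,"type":T,"data":"0 0 1.0,0 1 2.0,..."}
  // For reference, a minimal sketch of the inverse direction; the project's actual
  // FeatureExtraction.mat2json may differ in details such as formatting or precision.
  public static String mat2jsonSketch(Mat m) {
    StringBuilder data = new StringBuilder();
    for (int r = 0; r < m.rows(); r++) {
      for (int c = 0; c < m.cols(); c++) {
        if (data.length() > 0) data.append(',');
        // One "row col value" triple per element, matching the parser in json2mat
        data.append(r).append(' ').append(c).append(' ').append(m.get(r, c)[0]);
      }
    }
    return "{\"rows\":" + m.rows() + ",\"cols\":" + m.cols()
        + ",\"type\":" + m.type() + ",\"data\":\"" + data + "\"}";
  }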
  public static void extractQueryFeatures2HDFS(String filename, Job job) throws IOException {

    // Read the local image as a Mat. Note: imread's second argument is a flag, not a
    // CvType; any positive flag loads a 3-channel 8-bit BGR image.
    Mat query_mat_color =
        Highgui.imread(LOCAL_USER_DIR + ID + INPUT + "/" + filename, Highgui.IMREAD_COLOR);
    // Convert BGR (OpenCV's channel order) to GRAY
    Mat query_gray = new Mat();
    Imgproc.cvtColor(query_mat_color, query_gray, Imgproc.COLOR_BGR2GRAY);
    // Convert to the 8-bit unsigned type required by SIFT (convertTo keeps the
    // channel count, so the result stays single-channel)
    Mat query_mat_byte = new Mat();
    query_gray.convertTo(query_mat_byte, CvType.CV_8U);
    //        // Optionally resize the image to 1/FACTOR in both width and height
    //        query_mat_byte = FeatureExtraction.resize(query_mat_byte);

    // Extract the feature from the (Mat)image
    Mat query_features = FeatureExtraction.extractFeature(query_mat_byte);

    System.out.println(PREFIX + "Extracting the query image feature...");
    System.out.println("query_mat(float,color):" + query_mat_float);
    System.out.println("query_mat(float,gray):" + query_gray);
    System.out.println("query_mat(byte,gray):" + query_mat_byte);
    System.out.println("query_mat_features:" + query_features);
    System.out.println();

    // Store the feature in HDFS so the map tasks can read it later
    System.out.println(PREFIX + "Generating the feature file for the query image in HDFS...");
    FileSystem fs = FileSystem.get(job.getConfiguration());
    String featureFileName = filename.substring(0, filename.lastIndexOf(".")) + ".json";
    FSDataOutputStream fsDataOutputStream =
        fs.create(new Path(HDFS_HOME + USER + ID + INPUT + "/" + featureFileName));
    BufferedWriter bw =
        new BufferedWriter(new OutputStreamWriter(fsDataOutputStream, StandardCharsets.UTF_8));
    bw.write(FeatureExtraction.mat2json(query_features));
    bw.close();
    System.out.println(PREFIX + "Query feature extraction finished...");
    System.out.println();
  }
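
  // Hedged sketch (assumed driver wiring, not taken from this file): map() below finds the
  // query feature through the "featureFilePath" configuration key, so the job driver is
  // expected to point that key at the file written by extractQueryFeatures2HDFS.
  public static void registerQueryFeaturePath(Job job, String featureFileName) {
    job.getConfiguration()
        .set("featureFilePath", HDFS_HOME + USER + ID + INPUT + "/" + featureFileName);
  }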
  public void copyMat(Mat src, Mat dest) {
    // Copy element-wise, clamping to the destination bounds so a source larger than
    // the destination cannot write out of range
    int rows = Math.min(src.rows(), dest.rows());
    int cols = Math.min(src.cols(), dest.cols());

    for (int i = 0; i < rows; i++) {
      for (int j = 0; j < cols; j++) {
        dest.put(i, j, src.get(i, j)[0]);
      }
    }
  }
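
  // Note: for same-typed mats the element-wise loop above can be replaced by OpenCV's own
  // copy, which is faster and handles all channels. A minimal sketch, assuming src fits
  // inside dest:
  public void copyMatBuiltin(Mat src, Mat dest) {
    // copyTo into a submat writes src into the top-left corner of dest
    src.copyTo(dest.submat(0, src.rows(), 0, src.cols()));
  }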
  public static Mat getCCH(Mat image) {
    ArrayList<MatOfPoint> contours = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();
    // Note: findContours modifies the input image; pass a clone if it is needed afterwards
    Imgproc.findContours(
        image, contours, hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_NONE);

    Mat chainHistogram = Mat.zeros(1, 8, CvType.CV_32F);
    int n = 0;
    for (MatOfPoint contour : contours) {

      // Walk the contour and accumulate the Freeman chain code of every step
      int rows = contour.rows();
      int direction = 7;
      n += rows - 1;
      for (int i = 1; i < rows; i++) {
        // contour.get(i, 0) returns {x, y} for the i-th contour point;
        // compute the step between consecutive points
        double[] prev = contour.get(i - 1, 0);
        double[] curr = contour.get(i, 0);
        double dx = curr[0] - prev[0];
        double dy = curr[1] - prev[1];

        // 8-connected Freeman directions, counterclockwise from east,
        // with the image y axis pointing down
        if (dx == 1 && dy == 0) direction = 0;
        else if (dx == 1 && dy == -1) direction = 1;
        else if (dx == 0 && dy == -1) direction = 2;
        else if (dx == -1 && dy == -1) direction = 3;
        else if (dx == -1 && dy == 0) direction = 4;
        else if (dx == -1 && dy == 1) direction = 5;
        else if (dx == 0 && dy == 1) direction = 6;
        else if (dx == 1 && dy == 1) direction = 7;
        else System.out.print("err: non-adjacent contour points");
        double counter = chainHistogram.get(0, direction)[0];
        chainHistogram.put(0, direction, ++counter);
      }
    }
    System.out.println("\n" + chainHistogram.dump());
    // Normalize the histogram by the total number of chain steps
    Scalar alpha = new Scalar(n);
    Core.divide(chainHistogram, alpha, chainHistogram);
    System.out.println("\nsteps=" + n + " " + chainHistogram.dump());
    return chainHistogram;
  }
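
  // Hedged usage sketch: getCCH expects a single-channel 8-bit binary image, and
  // findContours modifies its input, so pass a clone if the original is still needed.
  // The path below is illustrative only.
  public static void cchExample() {
    Mat bin = Highgui.imread("images/wordseg/img1.png", Highgui.IMREAD_GRAYSCALE);
    Imgproc.threshold(bin, bin, 0, 255, Imgproc.THRESH_OTSU);
    Mat histogram = getCCH(bin.clone()); // 1x8 CV_32F, one bin per Freeman direction
    System.out.println(histogram.dump());
  }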
    public void map(Text key, Text value, Context context)
        throws InterruptedException, IOException {

      String filename = key.toString();
      String json = value.toString();

      // Make sure the input is valid
      if (!(filename.isEmpty() || json.isEmpty())) {

        // Convert the JSON-encoded feature to a Mat
        Mat descriptor = json2mat(json);
        if (descriptor != null) {
          // Read the query feature from the cache in Hadoop
          Mat query_features;
          String pathStr = context.getConfiguration().get("featureFilePath");
          FileSystem fs = FileSystem.get(context.getConfiguration());
          FSDataInputStream fsDataInputStream = fs.open(new Path(pathStr));
          StringBuilder sb = new StringBuilder();

          // Read the query feature in BUF_SIZE chunks until EOF. The offset argument of
          // read() is an offset into buf, so it must be 0 here; relying on available()
          // is also unsafe for remote streams. Decoding chunk-by-chunk is fine for this
          // ASCII JSON payload.
          byte[] buf = new byte[BUF_SIZE];
          int read;
          while ((read = fsDataInputStream.read(buf, 0, BUF_SIZE)) > 0) {
            sb.append(new String(buf, 0, read, StandardCharsets.UTF_8));
          }

          String query_json = sb.toString();
          fsDataInputStream.close();
          query_features = json2mat(query_json);

          // Get the similarity of the current database image against the query image
          // (FLANN-based matching requires CV_32F descriptors)
          DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
          MatOfDMatch matches = new MatOfDMatch();

          // Ensure the two feature matrices have the same number of columns
          // (SIFT descriptors are 128-dimensional, so both should have 128 cols)
          if (query_features.cols() == descriptor.cols()) {

            matcher.match(query_features, descriptor, matches);
            DMatch[] dMatches = matches.toArray();

            // Calculate the max/min distances over all matches
            double max_dist = 0;
            double min_dist = Double.MAX_VALUE;
            for (int i = 0; i < dMatches.length; i++) {
              double dist = dMatches[i].distance;
              if (min_dist > dist) min_dist = dist;
              if (max_dist < dist) max_dist = dist;
            }
            // Only matches with distance <= threshold count as good matches
            double threshold = max_dist * THRESHOLD_FACTOR;
            // (an alternative is a cutoff relative to the best match, e.g. min_dist * 2)
            LinkedList<DMatch> goodMatches = new LinkedList<DMatch>();
            LinkedList<DMatch> goodMatches = new LinkedList<DMatch>();

            for (int i = 0; i < dMatches.length; i++) {
              if (dMatches[i].distance <= threshold) {
                goodMatches.addLast(dMatches[i]);
              }
            }

            // Get the ratio of good_matches to all_matches
            double ratio = (double) goodMatches.size() / (double) dMatches.length;

            System.out.println("*** current_record_filename:" + filename + " ***");
            System.out.println("feature:" + descriptor + "\nquery_feature:" + query_features);
            System.out.println(
                "min_dist of keypoints:" + min_dist + "  max_dist of keypoints:" + max_dist);
            System.out.println(
                "total_matches:" + dMatches.length + "\tgood_matches:" + goodMatches.size());
            System.out.println();

            if (ratio > PERCENTAGE_THRESHOLD) {
              // Key:1        Value:filename|ratio
              context.write(ONE, new Text(filename + "|" + ratio));
            }
          } else {
            System.out.println(
                "The feature dimensions (cols) are not equal: skipping " + filename);
          }
        } else {
          // json2mat returned null: a broken/null feature, skip this record
          System.out.println("A broken/null feature:" + filename);
          System.out.println();
        }
      }
    }
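
  // Hedged sketch of a matching reducer (assumed, not taken from this file): every mapper
  // emits under the single key ONE with value "filename|ratio", so one reducer can pick the
  // best-matching database image. The IntWritable key type and the class name are assumptions.
  public static class BestMatchReducer extends Reducer<IntWritable, Text, Text, DoubleWritable> {
    @Override
    protected void reduce(IntWritable key, Iterable<Text> values, Context context)
        throws IOException, InterruptedException {
      String bestFile = null;
      double bestRatio = -1;
      for (Text value : values) {
        // Values look like "filename|ratio"; the ratio follows the last '|'
        String s = value.toString();
        int sep = s.lastIndexOf('|');
        double ratio = Double.parseDouble(s.substring(sep + 1));
        if (ratio > bestRatio) {
          bestRatio = ratio;
          bestFile = s.substring(0, sep);
        }
      }
      if (bestFile != null) {
        context.write(new Text(bestFile), new DoubleWritable(bestRatio));
      }
    }
  }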
  public static void main(String[] args) {
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    //      Mat mat = Mat.eye( 3, 3, CvType.CV_8UC1 );
    //      System.out.println( "mat = " + mat.dump() );

    Sample n = new Sample();
    //   n.templateMatching();

    // put text in image
    //      Mat data= Highgui.imread("images/erosion.jpg");

    //      Core.putText(data, "Sample", new Point(50,80), Core.FONT_HERSHEY_SIMPLEX, 1, new
    // Scalar(0,0,0),2);
    //
    //      Highgui.imwrite("images/erosion2.jpg", data);

    // Binarize an image and compute its Hu moments
    String path = "images/wordseg/img1.png"; // previously: "images/croppedfeature/go (20).jpg"
    Mat image = Highgui.imread(path, Highgui.IMREAD_GRAYSCALE);

    // Otsu binarization, then invert: white (255) pixels become 0 and black becomes 128
    Imgproc.threshold(image, image, 0, 255, Imgproc.THRESH_OTSU);
    Imgproc.threshold(image, image, 220, 128, Imgproc.THRESH_BINARY_INV);
    Mat newImg = new Mat(45, 100, image.type());

    newImg.setTo(new Scalar(0));
    n.copyMat(image, newImg);

    // Grid cell sizes for the (commented-out) 12-part feature split below
    int vgap = 25;
    int hgap = 45 / 3;

    // Compute Hu's seven invariant moments for the whole image
    Moments m = Imgproc.moments(image, false);
    Mat hu = new Mat();
    Imgproc.HuMoments(m, hu);
    System.out.println(hu.dump());

    //      //divide the mat into 12 parts then get the features of each part
    //      int count=1;
    //      for(int j=0; j<45; j+=hgap){
    //    	  for(int i=0;i<100;i+=vgap){
    //    		  Mat result = newImg.submat(j, j+hgap, i, i+vgap);
    //
    //
    //    		  Moments m= Imgproc.moments(result, false);
    //    		  double m01= m.get_m01();
    //    		  double m00= m.get_m00();
    //    		  double m10 = m.get_m10();
    //    		  int x= m00!=0? (int)(m10/m00):0;
    //    		  int y= m00!=0? (int)(m01/m00):0;
    //    		  Mat hu= new Mat();
    //    		  Imgproc.HuMoments(m, hu);
    //    		  System.out.println(hu.dump());
    //    		  System.out.println(count+" :"+x+" and "+y);
    //    		  Imgproc.threshold(result, result, 0,254, Imgproc.THRESH_BINARY_INV);
    //    		  Highgui.imwrite("images/submat/"+count+".jpg", result);
    //    		  count++;
    //
    //    	  }
    //      }
    //
    //    for(int i=vgap;i<100;i+=vgap){
    //	  Point pt1= new Point(i, 0);
    //      Point pt2= new Point(i, 99);
    //      Core.line(newImg, pt1, pt2, new Scalar(0,0,0));
    //  }
    //  for(int i=hgap;i<45;i+=hgap){
    //	  Point pt1= new Point(0, i);
    //      Point pt2= new Point(99, i);
    //      Core.line(newImg, pt1, pt2, new Scalar(0,0,0));
    //  }
    //      Highgui.imwrite("images/submat/copyto.jpg", newImg);
  }