Example #1
  public void performMatch() {

    // create feature detectors and feature extractors
    FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor orbExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    // set the keypoints
    keyPointImg = new MatOfKeyPoint();
    orbDetector.detect(imgGray, keyPointImg);

    MatOfKeyPoint keyPointTempl = new MatOfKeyPoint();
    orbDetector.detect(templGray, keyPointTempl);

    // get the descriptions
    descImg = new Mat(image.size(), image.type());
    orbExtractor.compute(imgGray, keyPointImg, descImg);

    Mat descTempl = new Mat(template.size(), template.type());
    orbExtractor.compute(templGray, keyPointTempl, descTempl);

    // perform matching
    matches = new MatOfDMatch();
    DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
    matcher.match(descImg, descTempl, matches);

    Log.i("perform match result", matches.size().toString());
  }
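A minimal follow-up sketch (not part of the original example) showing how the raw matches logged above could be filtered by distance, in the same spirit as the later examples; the helper name filterGoodMatches and the 0.7 factor are assumptions.

  private List<DMatch> filterGoodMatches(MatOfDMatch rawMatches) {
    List<DMatch> all = rawMatches.toList();

    // find the largest distance among all matches
    double maxDist = 0;
    for (DMatch m : all) {
      if (m.distance > maxDist) maxDist = m.distance;
    }

    // keep only matches within an assumed fraction of the maximum distance
    double threshold = 0.7 * maxDist;
    List<DMatch> good = new ArrayList<>();
    for (DMatch m : all) {
      if (m.distance <= threshold) good.add(m);
    }
    return good;
  }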
  public void testClear() {
    matcher.add(Arrays.asList(new Mat()));

    matcher.clear();

    assertTrue(matcher.empty());
  }
  public void testCloneBoolean() {
    matcher.add(Arrays.asList(new Mat()));

    DescriptorMatcher cloned = matcher.clone(true);

    assertNotNull(cloned);
    assertTrue(cloned.empty());
  }
  public void testMatchMatListOfDMatch() {
    Mat train = getTrainDescriptors();
    Mat query = getQueryDescriptors();
    MatOfDMatch matches = new MatOfDMatch();
    matcher.add(Arrays.asList(train));

    matcher.match(query, matches);

    assertArrayDMatchEquals(truth, matches.toArray(), EPS);
  }
  public void testGetTrainDescriptors() {
    Mat train = new Mat(1, 1, CvType.CV_8U, new Scalar(123));
    Mat truth = train.clone();
    matcher.add(Arrays.asList(train));

    List<Mat> descriptors = matcher.getTrainDescriptors();

    assertEquals(1, descriptors.size());
    assertMatEqual(truth, descriptors.get(0));
  }
  public void testMatchMatListOfDMatchListOfMat() {
    Mat train = getTrainDescriptors();
    Mat query = getQueryDescriptors();
    Mat mask = getMaskImg();
    MatOfDMatch matches = new MatOfDMatch();
    matcher.add(Arrays.asList(train));

    matcher.match(query, matches, Arrays.asList(mask));

    assertListDMatchEquals(Arrays.asList(truth[0], truth[1]), matches.toList(), EPS);
  }
  public void testRead() {
    String filename = OpenCVTestRunner.getTempFileName("yml");
    writeFile(filename, "%YAML:1.0\n");

    matcher.read(filename);
    assertTrue(true); // BruteforceMatcher has no settings
  }
  public void testWrite() {
    String filename = OpenCVTestRunner.getTempFileName("yml");

    matcher.write(filename);

    String truth = "%YAML:1.0\n";
    assertEquals(truth, readFile(filename));
  }
  public void onCameraViewStarted(int width, int height) {
    try {

      // start the background detection thread once
      if (detectThread == null) {
        detectThread = new DetectThread();
        thread = new Thread(detectThread);
        thread.setPriority(Thread.MIN_PRIORITY);
        thread.start();
      }

      // -- Step 1: Detect the keypoints using the FAST detector
      featureDetector = FeatureDetector.create(FeatureDetector.FAST);

      // -- Step 2: Calculate descriptors (feature vectors) with FREAK
      extractor = DescriptorExtractor.create(DescriptorExtractor.FREAK);

      // -- Step 3: Match descriptor vectors with a brute-force L1 matcher
      matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_L1);

      img_object = Utils.loadResource(this, R.drawable.cardobj, Highgui.CV_LOAD_IMAGE_GRAYSCALE);

      img_scene = new Mat();
      // img_scene = Utils.loadResource(this, R.drawable.cardscene,
      // Highgui.CV_LOAD_IMAGE_GRAYSCALE);

      matches = new MatOfDMatch();

      listGoodMatches = new ArrayList<>();

      keypoints_object = new MatOfKeyPoint();
      keypoints_scene = new MatOfKeyPoint();
      descriptors_object = new Mat();
      descriptors_scene = new Mat();

      obj = new MatOfPoint2f();
      scene = new MatOfPoint2f();

      H = new Mat();
      obj_corners = new MatOfPoint2f();

      scene_corners = new MatOfPoint2f();
      min_dist = 9999999;

      p0 = new Point(0, 0);
      p1 = new Point(0, 0);
      p2 = new Point(0, 0);
      p3 = new Point(0, 0);

      good_matches = new MatOfDMatch();

      listPointScene = new ArrayList<>();

    } catch (Exception ex) {
      Log.e("onCameraViewStarted", "initialization failed", ex);
    }
  }
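The DetectThread class referenced above is not shown in this example. A plausible sketch, assuming it simply keeps calling detectObject() while the view is active; the running flag and the 100 ms pause are assumptions.

  private class DetectThread implements Runnable {
    volatile boolean running = true;

    @Override
    public void run() {
      while (running) {
        try {
          detectObject(); // defined further below
          Thread.sleep(100); // assumed pacing between detection passes
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          return;
        }
      }
    }
  }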
  protected void setUp() throws Exception {
    super.setUp();
    matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMINGLUT);
    matSize = 100;

    truth =
        new DMatch[] {
          new DMatch(0, 0, 0, 51),
          new DMatch(1, 2, 0, 42),
          new DMatch(2, 1, 0, 40),
          new DMatch(3, 3, 0, 53)
        };
  }
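The getTrainDescriptors(), getQueryDescriptors() and getMaskImg() fixtures used by the match tests are not shown here. A rough sketch of their shape, assuming small hand-built CV_8U binary descriptors (which is what BRUTEFORCE_HAMMINGLUT expects); the real fixtures are tuned so that matching produces exactly the distances stored in truth.

  private Mat getTrainDescriptors() {
    // four 32-byte binary descriptors, one per row
    Mat train = new Mat(4, 32, CvType.CV_8U, new Scalar(0));
    for (int row = 0; row < train.rows(); row++) {
      train.row(row).setTo(new Scalar(row * 16)); // simple, distinct byte patterns
    }
    return train;
  }

  private Mat getQueryDescriptors() {
    // queries are slightly perturbed copies of the train descriptors
    Mat query = getTrainDescriptors();
    Core.add(query, new Scalar(1), query);
    return query;
  }

  private Mat getMaskImg() {
    // 4x4 mask (query rows x train rows): allow only the first two query descriptors,
    // matching testMatchMatListOfDMatchListOfMat, which expects truth[0] and truth[1]
    Mat mask = Mat.zeros(4, 4, CvType.CV_8U);
    mask.submat(0, 2, 0, 4).setTo(new Scalar(1));
    return mask;
  }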
  public void testMatchMatMatListOfDMatch() {
    Mat train = getTrainDescriptors();
    Mat query = getQueryDescriptors();
    MatOfDMatch matches = new MatOfDMatch();

    matcher.match(query, train, matches);

    /*
    OpenCVTestRunner.Log("matches found: " + matches.size());
    for (DMatch m : matches.toArray())
        OpenCVTestRunner.Log(m.toString());
    */

    assertArrayDMatchEquals(truth, matches.toArray(), EPS);
  }
    public void map(Text key, Text value, Context context)
        throws InterruptedException, IOException {

      String filename = key.toString();
      String json = value.toString();

      // Make sure the input is valid
      if (!(filename.isEmpty() || json.isEmpty())) {

        // Change the json-type feature to Mat-type feature
        Mat descriptor = json2mat(json);
        if (descriptor != null) {
          // Read the query feature from the cache in Hadoop
          Mat query_features;
          String pathStr = context.getConfiguration().get("featureFilePath");
          FileSystem fs = FileSystem.get(context.getConfiguration());
          FSDataInputStream fsDataInputStream = fs.open(new Path(pathStr));
          StringBuilder sb = new StringBuilder();

          // Use a buffer to read the query_feature
          int remain = fsDataInputStream.available();
          while (remain > 0) {
            byte[] buf = new byte[BUF_SIZE];
            // the offset argument of read(byte[], off, len) is an offset into buf, so read into buf[0..]
            int read = fsDataInputStream.read(buf, 0, Math.min(BUF_SIZE, remain));
            if (read < 0) break; // end of stream
            sb.append(new String(buf, 0, read, StandardCharsets.UTF_8));
            remain -= read;
            System.out.println("remain:" + remain + "\tread:" + read + "\tsb.size:" + sb.length());
          }

          // Read the query_feature line by line
          //                    Scanner sc = new Scanner(fsDataInputStream, "UTF-8");
          //                    StringBuilder sb = new StringBuilder();
          //                    while (sc.hasNextLine()) {
          //                        sb.append(sc.nextLine());
          //                    }
          //                    String query_json = sb.toString();
          //                    String query_json = new String(buf, StandardCharsets.UTF_8);

          String query_json = sb.toString();
          fsDataInputStream.close();
          query_features = json2mat(query_json);

          // Get the similarity of the current database image against the query image
          DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
          MatOfDMatch matches = new MatOfDMatch();

          // Ensure the two feature matrices have the same number of columns
          // (the features extracted here all have 128 columns)
          if (query_features.cols() == descriptor.cols()) {

            matcher.match(query_features, descriptor, matches);
            DMatch[] dMatches = matches.toArray();

            // Calculate the max/min distances
            //                    double max_dist = Double.MAX_VALUE;
            //                    double min_dist = Double.MIN_VALUE;
            double max_dist = 0;
            double min_dist = 100;
            for (int i = 0; i < dMatches.length; i++) {
              double dist = dMatches[i].distance;
              if (min_dist > dist) min_dist = dist;
              if (max_dist < dist) max_dist = dist;
            }
            // Only distances ≤ threshold are good matches
            double threshold = max_dist * THRESHOLD_FACTOR;
            //                    double threshold = min_dist * 2;
            LinkedList<DMatch> goodMatches = new LinkedList<DMatch>();

            for (int i = 0; i < dMatches.length; i++) {
              if (dMatches[i].distance <= threshold) {
                goodMatches.addLast(dMatches[i]);
              }
            }

            // Get the ratio of good_matches to all_matches
            double ratio = (double) goodMatches.size() / (double) dMatches.length;

            System.out.println("*** current_record_filename:" + filename + " ***");
            System.out.println("feature:" + descriptor + "\nquery_feature:" + query_features);
            System.out.println(
                "min_dist of keypoints:" + min_dist + "  max_dist of keypoints:" + max_dist);
            System.out.println(
                "total_matches:" + dMatches.length + "\tgood_matches:" + goodMatches.size());
            //                    System.out.println("type:" + descriptor.type() + " channels:" +
            // descriptor.channels() + " rows:" + descriptor.rows() + " cols:" + descriptor.cols());
            //                    System.out.println("qtype:" + query_features.type() + "
            // qchannels:" + query_features.channels() + " qrows:" + query_features.rows() + "
            // qcols:" + query_features.cols());
            System.out.println();

            if (ratio > PERCENTAGE_THRESHOLD) {
              // Key:1        Value:filename|ratio
              context.write(ONE, new Text(filename + "|" + ratio));
              //                        context.write(ONE, new Text(filename + "|" +
              // String.valueOf(goodMatches.size())));
            }
          } else {
            System.out.println("The size of the features are not equal");
          }
        } else {
          // a null pointer, do nothing
          System.out.println("A broken/null feature:" + filename);
          System.out.println();
        }
      }
    }
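The json2mat() helper used above is not shown. A minimal sketch under the assumption that each feature is serialized as a JSON array of rows, each row an array of 128 numbers, and that org.json is available; FLANN-based matching requires CV_32F descriptors, so that is the type used.

    private Mat json2mat(String json) {
      try {
        JSONArray rows = new JSONArray(json);
        if (rows.length() == 0) {
          return null;
        }
        int cols = rows.getJSONArray(0).length();
        Mat mat = new Mat(rows.length(), cols, CvType.CV_32F);
        for (int r = 0; r < rows.length(); r++) {
          JSONArray row = rows.getJSONArray(r);
          for (int c = 0; c < cols; c++) {
            mat.put(r, c, row.getDouble(c));
          }
        }
        return mat;
      } catch (JSONException e) {
        // the caller treats null as a broken feature
        return null;
      }
    }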
 public void testIsMaskSupported() {
   assertTrue(matcher.isMaskSupported());
 }
Example #14
  public Template performMatches(Map<String, Template> templates) {

    // create feature detectors and feature extractors
    FeatureDetector orbDetector = FeatureDetector.create(FeatureDetector.ORB);
    DescriptorExtractor orbExtractor = DescriptorExtractor.create(DescriptorExtractor.ORB);

    MatOfKeyPoint keyPointImgT;
    Mat descImgT;
    // set the keypoints
    keyPointImgT = new MatOfKeyPoint();
    orbDetector.detect(imgGray, keyPointImgT);

    descImgT = new Mat(image.size(), image.type());
    orbExtractor.compute(imgGray, keyPointImgT, descImgT);

    Template best = null;
    matches = null;
    Map.Entry<String, Template> maxEntry = null;
    //  MatOfDMatch matches = new MatOfDMatch();

    for (Map.Entry<String, Template> entry : templates.entrySet()) {

      MatOfKeyPoint keyPointTempl = null;
      Mat descTempl = null;
      Mat tGray = null;

      Template t = entry.getValue();
      if (null == t.getTemplGray() || null == t.getDescTempl() || null == t.getKeyPointTempl()) {
        // read image from stored data
        Mat templ = readImgFromFile(t.getTemplName());

        tGray = new Mat(templ.size(), templ.type());
        Imgproc.cvtColor(templ, tGray, Imgproc.COLOR_BGRA2GRAY);

        keyPointTempl = new MatOfKeyPoint();
        orbDetector.detect(tGray, keyPointTempl);

        descTempl = new Mat(templ.size(), templ.type());
        orbExtractor.compute(tGray, keyPointTempl, descTempl);

        t.setKeyPointTempl(keyPointTempl);
        t.setDescTempl(descTempl);
      } else {
        descTempl = t.getDescTempl();
      }

      MatOfDMatch matchWithT = new MatOfDMatch();
      DescriptorMatcher matcher = DescriptorMatcher.create(DescriptorMatcher.BRUTEFORCE_HAMMING);
      // matcher.radiusMatch(descImgT, descTempl, matchWithT,200);//
      matcher.match(descImgT, descTempl, matchWithT);
      List<DMatch> matchList = matchWithT.toList();
      //            float min = Float.MAX_VALUE;
      //            float max = Float.MIN_VALUE;
      //            for(int i=0;i<matchList.size();i++){
      //                min = matchList.get(i).distance<min?matchList.get(i).distance:min;
      //                max = matchList.get(i).distance>max?matchList.get(i).distance:max;
      //            }
      //            Log.i("min distance","min distance is::"+min+"max
      // distance::"+max+"size::"+matchList.size());

      //            Collections.sort(matchList, new Comparator<DMatch>() {
      //                @Override
      //                public int compare(DMatch o1, DMatch o2) {
      //                    if (o1.distance < o2.distance)
      //                        return -1;
      //                    if (o1.distance > o2.distance)
      //                        return 1;
      //                    return 0;
      //                }
      //            });

      float ratio = -1;
      if (matchList.size() > 0) ratio = findMinTwoRatio(matchList);

      if (ratio > 0.8 || ratio == -1) continue;
      Log.i("match", "ratio::" + ratio);

      // TODO: revisit logic
      if (matches == null || (matchWithT.size().height > matches.size().height)) {
        matches = matchWithT;
        keyPointImg = keyPointImgT;
        descImg = descImgT;
        best = t;
      }
    }

    //  Log.i("perform match result", matches.size().toString());

    return best;
  }
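The findMinTwoRatio() helper called above is not shown. A plausible sketch, assuming it returns the ratio of the smallest match distance to the second-smallest (the caller skips templates whose ratio exceeds 0.8 or equals -1).

  private float findMinTwoRatio(List<DMatch> matchList) {
    if (matchList.size() < 2) {
      return -1; // not enough matches to form a ratio
    }
    float min1 = Float.MAX_VALUE; // smallest distance
    float min2 = Float.MAX_VALUE; // second-smallest distance
    for (DMatch m : matchList) {
      if (m.distance < min1) {
        min2 = min1;
        min1 = m.distance;
      } else if (m.distance < min2) {
        min2 = m.distance;
      }
    }
    return min2 == 0 ? -1 : min1 / min2;
  }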
  private void detectObject() {

    readLock.lock();

    featureDetector.detect(img_scene, keypoints_scene);
    featureDetector.detect(img_object, keypoints_object);

    extractor.compute(img_object, keypoints_object, descriptors_object);
    extractor.compute(img_scene, keypoints_scene, descriptors_scene);

    readLock.unlock();

    if (!descriptors_scene.empty()) {
      matcher.match(descriptors_object, descriptors_scene, matches);

      // readLock.unlock();

      //
      listMatches = matches.toList();

      int size = descriptors_object.rows();

      // -- Quick calculation of max and min distances between keypoints
      for (int i = 0; i < size; i++) {
        double dist = listMatches.get(i).distance;
        if (dist < min_dist) {
          min_dist = dist;
        }
      }

      Log.e("Min", min_dist + "");

      threeMinDist = 3 * min_dist;

      listGoodMatches.clear();

      for (int i = 0; i < size; i++) {
        DMatch dMatch = listMatches.get(i);

        float distance = dMatch.distance;

        if (distance < threeMinDist) {
          listGoodMatches.add(dMatch);
        }
      }

      // good_matches.fromList(listGoodMatches);

      Log.e("Matches", listMatches.size() + "");
      Log.e("Good Matches", listGoodMatches.size() + "");
      //

      if (listGoodMatches.size() > 4) {
        Point[] pointObj = new Point[listGoodMatches.size()];
        Point[] pointScene = new Point[listGoodMatches.size()];

        listKeyPointObject = keypoints_object.toList();
        listKeyPointScene = keypoints_scene.toList();

        // listPointScene.removeAll(listPointScene);
        for (int i = 0; i < listGoodMatches.size(); i++) {
          // -- Get the keypoints from the good matches
          pointObj[i] = listKeyPointObject.get(listGoodMatches.get(i).queryIdx).pt;
          pointScene[i] = listKeyPointScene.get(listGoodMatches.get(i).trainIdx).pt;

          // listPointScene.add(listKeyPointScene.get(listGoodMatches.get(i).trainIdx).pt);
        }

        obj.fromArray(pointObj);
        scene.fromArray(pointScene);

        Log.e("Before findHomography", "");

        H = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, 9);

        Log.e("AFTERRR findHomography", "");

        pointObjConners[0] = new Point(0, 0);
        pointObjConners[1] = new Point(img_object.cols(), 0);
        pointObjConners[2] = new Point(img_object.cols(), img_object.rows());
        pointObjConners[3] = new Point(0, img_object.rows());

        obj_corners.fromArray(pointObjConners);

        Core.perspectiveTransform(obj_corners, scene_corners, H);

        List<Point> sceneCornerList = scene_corners.toList();
        p0 = new Point(sceneCornerList.get(0).x, sceneCornerList.get(0).y);
        p1 = new Point(sceneCornerList.get(1).x, sceneCornerList.get(1).y);
        p2 = new Point(sceneCornerList.get(2).x, sceneCornerList.get(2).y);
        p3 = new Point(sceneCornerList.get(3).x, sceneCornerList.get(3).y);

        Log.e("POINT THREAD", p0.toString() + p1.toString() + p2.toString() + p3.toString());

        Log.e("detect ok", "detect ok");
      }

    } else {
      Log.e("No descritor", "No descritor");

      // readLock.unlock();
    }
  }
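A minimal sketch (not part of the original) of the camera callback that would feed img_scene and draw the quadrilateral computed by detectObject(); it follows the CvCameraViewListener2 signature, and the mRgba name, the green color, and reusing the same lock around the img_scene update are assumptions.

  public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat mRgba = inputFrame.rgba();

    // hand the current frame to the detection thread as a grayscale scene image
    readLock.lock();
    Imgproc.cvtColor(mRgba, img_scene, Imgproc.COLOR_RGBA2GRAY);
    readLock.unlock();

    // draw the last detected object outline (OpenCV 2.4-style Core.line)
    Scalar green = new Scalar(0, 255, 0, 255);
    Core.line(mRgba, p0, p1, green, 4);
    Core.line(mRgba, p1, p2, green, 4);
    Core.line(mRgba, p2, p3, green, 4);
    Core.line(mRgba, p3, p0, green, 4);

    return mRgba;
  }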
 public void testAdd() {
   matcher.add(Arrays.asList(new Mat()));
   assertFalse(matcher.empty());
 }
 public void testEmpty() {
   assertTrue(matcher.empty());
 }
 public void testTrain() {
   matcher.train(); // BruteforceMatcher does not need to train
 }