  private void CreateAuxiliaryMats() {
    if (mRgba.empty()) return;

    mSizeRgba = mRgba.size();

    int rows = (int) mSizeRgba.height;
    int cols = (int) mSizeRgba.width;

    // Inner window: the central 3/4 of the frame, inset by 1/8 on each side
    int left = cols / 8;
    int top = rows / 8;

    int width = cols * 3 / 4;
    int height = rows * 3 / 4;

    // The submats below are views into mRgba / mGray, not copies, so anything
    // written into them appears directly in the corresponding frame region
    if (mRgbaInnerWindow == null)
      mRgbaInnerWindow = mRgba.submat(top, top + height, left, left + width);
    mSizeRgbaInner = mRgbaInnerWindow.size();

    if (mGrayInnerWindow == null && !mGray.empty())
      mGrayInnerWindow = mGray.submat(top, top + height, left, left + width);

    // Vertical strip covering the middle third of the frame
    if (mBlurWindow == null) mBlurWindow = mRgba.submat(0, rows, cols / 3, cols * 2 / 3);

    // Top-left corner region used as the zoom display area
    if (mZoomCorner == null)
      mZoomCorner = mRgba.submat(0, rows / 2 - rows / 10, 0, cols / 2 - cols / 10);

    // Small window around the frame centre that gets magnified into mZoomCorner
    if (mZoomWindow == null)
      mZoomWindow =
          mRgba.submat(
              rows / 2 - 9 * rows / 100,
              rows / 2 + 9 * rows / 100,
              cols / 2 - 9 * cols / 100,
              cols / 2 + 9 * cols / 100);
  }
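The submats created above are views into mRgba rather than copies, so filtering a view edits the full frame in place. A minimal sketch of that idea, assuming only the OpenCV Java bindings (the class and method names here are my own illustration, not part of the sample):

import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;

public final class SubmatBlurSketch {
  // roi is a view into frame, so the blurred pixels land directly in frame.
  public static void blurCenter(Mat frame) {
    int rows = frame.rows();
    int cols = frame.cols();
    Mat roi = frame.submat(rows / 8, rows * 7 / 8, cols / 8, cols * 7 / 8);
    Imgproc.blur(roi, roi, new Size(15, 15));
  }
}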
  private void checkChanges(ScreenImage img) {
    if (Settings.UseImageFinder) {
      // ImageFinder path: keep the previous frame as a Mat and ask the finder
      // whether the new frame differs by at least _minChanges
      if (_lastImageMat.empty()) {
        _lastImageMat = Image.createMat(img.getImage());
        return;
      }
      ImageFinder f = new ImageFinder(_lastImageMat);
      f.setMinChanges(_minChanges);
      org.opencv.core.Mat current = Image.createMat(img.getImage());
      if (f.hasChanges(current)) {
        // TODO implement ChangeObserver: processing changes
        Debug.log(3, "ChangeObserver: processing changes");
      }
      _lastImageMat = current;
    } else {
      // Vision path: let the native Vision module diff the two frames and
      // notify the registered change observers
      if (_lastImgMat == null) {
        _lastImgMat = Image.convertBufferedImageToMat(img.getImage());
        return;
      }
      FindInput fin = new FindInput();
      fin.setSource(_lastImgMat);
      Mat target = Image.convertBufferedImageToMat(img.getImage());
      fin.setTarget(target);
      fin.setSimilarity(_minChanges);
      FindResults results = Vision.findChanges(fin);
      try {
        callChangeObserver(results);
      } catch (AWTException e) {
        Debug.error("EventMgr: checkChanges: %s", e.getMessage());
      }
      _lastImgMat = target;
    }
  }
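ImageFinder.hasChanges() and Vision.findChanges() are SikuliX internals; the underlying question "has the screen changed enough?" can be sketched with plain OpenCV calls like this (my own illustration, not the SikuliX implementation; the frames are assumed to be same-sized single-channel Mats):

import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public final class ChangeSketch {
  // Returns true when more than minChangedPixels pixels differ noticeably
  // between two same-sized grayscale frames.
  public static boolean hasChanges(Mat last, Mat current, int minChangedPixels) {
    Mat diff = new Mat();
    Core.absdiff(last, current, diff);
    Imgproc.threshold(diff, diff, 25, 255, Imgproc.THRESH_BINARY);
    return Core.countNonZero(diff) > minChangedPixels;
  }
}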
Example #3
  private void CreateAuxiliaryMats() {
    if (mGray.empty()) return;

    int rows = mGray.rows();
    int cols = mGray.cols();

    // Two corner submats of the RGBA frame (bottom-right and top-right) used
    // as zoom windows; writing into them draws directly on the preview frame
    if (mZoomWindow == null) {
      mZoomWindow = mRgba.submat(rows / 2 + rows / 10, rows, cols / 2 + cols / 10, cols);
      mZoomWindow2 = mRgba.submat(0, rows / 2 - rows / 10, cols / 2 + cols / 10, cols);
    }
  }
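Such zoom-window submats are typically filled by resizing a detected region into them; a minimal sketch of that step (my own illustration, assuming only the OpenCV Java bindings):

import org.opencv.core.Mat;
import org.opencv.core.Rect;
import org.opencv.imgproc.Imgproc;

public final class ZoomSketch {
  // Copy a magnified view of roi into zoomWindow, which is itself a submat
  // of the display frame, so the zoomed patch appears on screen directly.
  public static void zoomInto(Mat frame, Rect roi, Mat zoomWindow) {
    Mat region = frame.submat(roi);
    Imgproc.resize(region, zoomWindow, zoomWindow.size());
  }
}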
Example #4
        @Override
        public void onPictureTaken(byte[] arg0, Camera arg1) {
          Bitmap bitmapPicture = BitmapFactory.decodeByteArray(arg0, 0, arg0.length);
          mView.saveCurrentFilterSet(); // Store filter for later reference

          // Get current running filter
          if (!mView.curFilterSubset().equals("")) {
            Mat imgMAT = new Mat();
            Utils.bitmapToMat(bitmapPicture, imgMAT);
            // If the image is empty
            if (imgMAT.empty()) return; // Ignore
            // Apply filters
            mView.applyCurrentFilters(imgMAT);
            Utils.matToBitmap(imgMAT, bitmapPicture);
          } else {
            // Log.i(TAG, "no filter");
          }

          startImageEdit(bitmapPicture);
        }
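What applyCurrentFilters() does is app-specific; the Bitmap <-> Mat round trip itself can be sketched like this (my own illustration; the grayscale conversion stands in for whatever filter set is active, and the input Bitmap is assumed to be a mutable ARGB_8888 bitmap):

import android.graphics.Bitmap;
import org.opencv.android.Utils;
import org.opencv.core.Mat;
import org.opencv.imgproc.Imgproc;

public final class FilterSketch {
  // Convert a Bitmap to a Mat, run one OpenCV filter, and write the result
  // back into the same (mutable, ARGB_8888) Bitmap.
  public static Bitmap toGrayscale(Bitmap input) {
    Mat mat = new Mat();
    Utils.bitmapToMat(input, mat);                        // Bitmap -> RGBA Mat
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_RGBA2GRAY);  // the "filter"
    Imgproc.cvtColor(mat, mat, Imgproc.COLOR_GRAY2RGBA);  // back to 4 channels
    Utils.matToBitmap(mat, input);                        // Mat -> Bitmap, in place
    return input;
  }
}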
Example #5
  public static void main(String[] args) throws InterruptedException {

    // load the Core OpenCV library by name

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // create video capture device object

    VideoCapture cap = new VideoCapture();

    // try to use the hardware device if present

    int CAM_TO_USE = 0;

    // create new image objects

    Mat frame = new Mat();
    Mat foreground = new Mat();
    Mat fg_mask = new Mat();

    // create new Mixture of Gaussian BG model

    BackgroundSubtractorMOG MoG = new BackgroundSubtractorMOG();

    // try to open the first capture device (0); VideoCapture.open() reports
    // failure through isOpened() rather than by throwing, so check that
    // explicitly and fall back to a video file if no webcam is available

    cap.open(CAM_TO_USE);

    if (!cap.isOpened()) {
      System.out.println("No webcam attached");

      // otherwise try opening a video file

      cap.open("files/video.mp4");

      if (!cap.isOpened()) {
        System.out.println("No video file found");
      }
    }

    // if a video capture source is now open

    if (cap.isOpened()) {
      // create new window objects

      Imshow imsS = new Imshow("from video Source ... ");
      Imshow imsF = new Imshow("Foreground");

      boolean keepProcessing = true;

      while (keepProcessing) {
        // grab and return the next frame from video source

        cap.grab();
        cap.retrieve(frame);

        // if the frame is valid (not end of video for example)

        if (!(frame.empty())) {

          // add it to the background model with a learning rate of 0.1

          MoG.apply(frame, fg_mask, 0.1);

          // extract the foreground mask (non-zero = foreground, 0 = background),
          // and convert/expand it to a 3-channel version of the same

          Imgproc.cvtColor(fg_mask, fg_mask, Imgproc.COLOR_GRAY2BGR);

          // logically AND it with the original frame to extract colour
          // pixel only in the foreground regions

          Core.bitwise_and(frame, fg_mask, foreground);

          // display images with a delay of 40 ms (1000 ms / 25 fps = 40 ms per frame)

          imsS.showImage(frame);
          imsF.showImage(foreground);

          Thread.sleep(40);

        } else {
          keepProcessing = false;
        }
      }

    } else {
      System.out.println("error cannot open any capture source - exiting");
    }

    // close down the camera correctly

    cap.release();
  }
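The new BackgroundSubtractorMOG() constructor above appears to target the OpenCV 2.4 Java bindings; in OpenCV 3.x/4.x the background subtractor is obtained from a factory method instead. A minimal sketch of the equivalent setup under that assumption:

import org.opencv.core.Mat;
import org.opencv.video.BackgroundSubtractorMOG2;
import org.opencv.video.Video;

public final class Mog2Sketch {
  // Create the model once...
  public static BackgroundSubtractorMOG2 newModel() {
    return Video.createBackgroundSubtractorMOG2();
  }

  // ...then feed it one frame at a time and get back the foreground mask.
  public static Mat foregroundMask(BackgroundSubtractorMOG2 mog2, Mat frame) {
    Mat fgMask = new Mat();
    mog2.apply(frame, fgMask, 0.1); // same learning rate as in the example above
    return fgMask;
  }
}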
  private void detectObject() {

    // Detect keypoints and compute descriptors for the object and the scene
    // while holding the read lock that guards the shared images
    readLock.lock();

    featureDetector.detect(img_scene, keypoints_scene);
    featureDetector.detect(img_object, keypoints_object);

    extractor.compute(img_object, keypoints_object, descriptors_object);
    extractor.compute(img_scene, keypoints_scene, descriptors_scene);

    readLock.unlock();

    if (!descriptors_scene.empty()) {
      matcher.match(descriptors_object, descriptors_scene, matches);

      // readLock.unlock();

      listMatches = matches.toList();

      int size = descriptors_object.rows();

      // -- Quick calculation of the minimum distance between matched keypoints
      for (int i = 0; i < size; i++) {
        double dist = listMatches.get(i).distance;
        if (dist < min_dist) {
          min_dist = dist;
        }
      }

      Log.e("Min", min_dist + "");

      threeMinDist = 3 * min_dist;

      // Keep only the matches whose distance is below 3 * min_dist
      listGoodMatches.clear();

      for (int i = 0; i < size; i++) {
        DMatch dMatch = listMatches.get(i);

        float distance = dMatch.distance;

        if (distance < threeMinDist) {
          listGoodMatches.add(dMatch);
        }
      }

      // good_matches.fromList(listGoodMatches);

      Log.e("Matches", listMatches.size() + "");
      Log.e("Good Matches", listGoodMatches.size() + "");

      // findHomography needs at least 4 point correspondences
      if (listGoodMatches.size() > 4) {
        Point[] pointObj = new Point[listGoodMatches.size()];
        Point[] pointScene = new Point[listGoodMatches.size()];

        listKeyPointObject = keypoints_object.toList();
        listKeyPointScene = keypoints_scene.toList();

        // listPointScene.removeAll(listPointScene);
        for (int i = 0; i < listGoodMatches.size(); i++) {
          // -- Get the keypoints from the good matches
          pointObj[i] = listKeyPointObject.get(listGoodMatches.get(i).queryIdx).pt;
          pointScene[i] = listKeyPointScene.get(listGoodMatches.get(i).trainIdx).pt;

          // listPointScene.add(listKeyPointScene.get(listGoodMatches.get(i).trainIdx).pt);
        }

        obj.fromArray(pointObj);
        scene.fromArray(pointScene);

        Log.e("Before findHomography", "");

        // estimate the homography mapping object points to scene points
        // (RANSAC with a reprojection threshold of 9 pixels)
        H = Calib3d.findHomography(obj, scene, Calib3d.RANSAC, 9);

        Log.e("AFTERRR findHomography", "");

        // Corners of the object image, projected into the scene via the homography
        pointObjConners[0] = new Point(0, 0);
        pointObjConners[1] = new Point(img_object.cols(), 0);
        pointObjConners[2] = new Point(img_object.cols(), img_object.rows());
        pointObjConners[3] = new Point(0, img_object.rows());

        obj_corners.fromArray(pointObjConners);

        Core.perspectiveTransform(obj_corners, scene_corners, H);

        java.util.List<Point> corners = scene_corners.toList();
        p0 = new Point(corners.get(0).x, corners.get(0).y);
        p1 = new Point(corners.get(1).x, corners.get(1).y);
        p2 = new Point(corners.get(2).x, corners.get(2).y);
        p3 = new Point(corners.get(3).x, corners.get(3).y);

        Log.e("POINT THREAD", p0.toString() + p1.toString() + p2.toString() + p3.toString());

        Log.e("detect ok", "detect ok");
      }

    } else {
      Log.e("No descritor", "No descritor");

      // readLock.unlock();
    }
  }
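The projected corners p0..p3 are usually drawn back onto the preview frame to visualise the detection; a minimal sketch of that step (my own illustration, using Imgproc.line from the OpenCV 3.x/4.x Java bindings; in 2.4 the equivalent call lives on Core):

import org.opencv.core.Mat;
import org.opencv.core.Point;
import org.opencv.core.Scalar;
import org.opencv.imgproc.Imgproc;

public final class OutlineSketch {
  // Draw the projected object corners as a closed green quadrilateral.
  public static void drawOutline(Mat frame, Point p0, Point p1, Point p2, Point p3) {
    Scalar green = new Scalar(0, 255, 0, 255);
    Imgproc.line(frame, p0, p1, green, 4);
    Imgproc.line(frame, p1, p2, green, 4);
    Imgproc.line(frame, p2, p3, green, 4);
    Imgproc.line(frame, p3, p0, green, 4);
  }
}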