Example #1
  public static void main(String[] args) {
    // Load the OpenCV library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // Setup the camera
    VideoCapture camera = new VideoCapture();
    camera.open(0);

    // Create GUI windows to display camera output and OpenCV output
    int width = 320;
    int height = 240;
    camera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, width);
    camera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, height);
    JLabel cameraPane = createWindow("Camera output", width, height);
    JLabel opencvPane = createWindow("OpenCV output", width, height);

    // Main loop
    Mat rawImage = new Mat();
    while (true) {
      // Wait until the camera has a new frame
      while (!camera.read(rawImage)) {
        try {
          Thread.sleep(1);
        } catch (InterruptedException e) {
          e.printStackTrace();
        }
      }

      // Process the image however you like
      Mat binary = Detection.detectHueRange(rawImage);
      org.opencv.core.Point center = Detection.nextCenter(binary, width / 2, height / 2, 5);
      Mat processedImage = Detection.convertC(binary);
      System.out.println(center.x);
      System.out.println(center.y);

      // Mat lines = Detection.detectEdges(rawImage, 80, 3);
      // List<org.opencv.core.Point> edges = Detection.findWallEdges(lines, rawImage, 25);
      // Detection.drawLines(binary, edges);
      // Mat processedImage = Detection.convertC(binary);
      Mat edges = Detection.contourImage(rawImage, 150, 3);
      Detection.hueEdges(rawImage, edges);
      Mat edgesC = Detection.convertC(edges);
      // List<org.opencv.core.Point> lines = Detection.hueLines(edges);
      // Detection.drawLines(binary, lines);
      // Mat processedImage = Detection.convertC(binary);

      // Update the GUI windows
      updateWindow(cameraPane, rawImage);
      // updateWindow(opencvPane, processedImage);
      updateWindow(opencvPane, edgesC);
    }
  }
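
The createWindow and updateWindow helpers called above are not shown in the snippet. A minimal sketch of what they might look like, assuming one plain Swing window per pane and the same OpenCV 2.4 Highgui API the example already uses (the original project's versions may differ):

  // requires: javax.swing.*, javax.imageio.ImageIO, java.awt.image.BufferedImage,
  //           java.io.ByteArrayInputStream, java.io.IOException,
  //           org.opencv.core.MatOfByte, org.opencv.highgui.Highgui
  private static JLabel createWindow(String name, int width, int height) {
    // one Swing frame per pane; the returned JLabel is what updateWindow paints onto
    JFrame frame = new JFrame(name);
    JLabel pane = new JLabel();
    frame.add(pane);
    frame.setSize(width, height);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setVisible(true);
    return pane;
  }

  private static void updateWindow(JLabel pane, Mat mat) {
    try {
      // re-encode the Mat as PNG and hand it to Swing as an ImageIcon
      MatOfByte buffer = new MatOfByte();
      Highgui.imencode(".png", mat, buffer);
      BufferedImage image = ImageIO.read(new ByteArrayInputStream(buffer.toArray()));
      pane.setIcon(new ImageIcon(image));
      pane.repaint();
    } catch (IOException e) {
      e.printStackTrace();
    }
  }

Re-encoding every frame as PNG keeps the sketch short but is not the fastest route; copying the raw Mat bytes into a TYPE_3BYTE_BGR BufferedImage, as sketched after Example #2, avoids the compression step.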
Example #2
  /** Capture images and run color processing through here */
  public void capture() {
    VideoCapture camera = new VideoCapture();

    camera.set(12, -20); // adjust saturation (capture property 12); might not be necessary

    // CaptureImage image = new CaptureImage();

    camera.open(0); // open the default capture device
    if (!camera.isOpened()) {
      System.out.println("Camera Error");
      camera.release();
      return; // nothing more to do without a capture source
    } else {
      System.out.println("Camera OK");
    }

    boolean success = camera.read(capturedFrame);
    if (success) {
      try {
        processWithContours(capturedFrame, processedFrame);
      } catch (Exception e) {
        System.out.println(e);
      }
      // image.processFrame(capturedFrame, processedFrame);
      // processedFrame should be CV_8UC3

      // image.findCaptured(processedFrame);

      // image.determineKings(capturedFrame);

      int bufferSize = processedFrame.channels() * processedFrame.cols() * processedFrame.rows();
      byte[] b = new byte[bufferSize];

      processedFrame.get(0, 0, b); // get all the pixels
      // This might need to be BufferedImage.TYPE_INT_ARGB
      img =
          new BufferedImage(
              processedFrame.cols(), processedFrame.rows(), BufferedImage.TYPE_INT_RGB);
      int width = (int) camera.get(Highgui.CV_CAP_PROP_FRAME_WIDTH);
      int height = (int) camera.get(Highgui.CV_CAP_PROP_FRAME_HEIGHT);
      // img.getRaster().setDataElements(0, 0, width, height, b);
      byte[] a = new byte[bufferSize];
      System.arraycopy(b, 0, a, 0, bufferSize);

      Highgui.imwrite("camera.jpg", processedFrame);
      System.out.println("Success");
    } else System.out.println("Unable to capture image");

    camera.release();
  }
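
The commented-out setDataElements call and the TYPE_INT_ARGB note above leave the Mat-to-BufferedImage conversion unfinished. A minimal sketch of one way to complete it, assuming processedFrame really is CV_8UC3 (BGR) as the comment says; BufferedImage.TYPE_3BYTE_BGR matches OpenCV's byte order, so the pixel buffer can be copied straight into the raster:

      // requires: java.awt.image.BufferedImage
      int bufferSize = processedFrame.channels() * processedFrame.cols() * processedFrame.rows();
      byte[] pixels = new byte[bufferSize];
      processedFrame.get(0, 0, pixels); // copy every pixel out of the Mat
      img =
          new BufferedImage(
              processedFrame.cols(), processedFrame.rows(), BufferedImage.TYPE_3BYTE_BGR);
      img.getRaster().setDataElements(0, 0, processedFrame.cols(), processedFrame.rows(), pixels);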
Example #3
  public static void main(String[] args) throws InterruptedException {

    // load the Core OpenCV library by name

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // create video capture device object

    VideoCapture cap = new VideoCapture();

    // try to use the hardware device if present

    int CAM_TO_USE = 0;

    // create new image objects

    Mat frame = new Mat();
    Mat foreground = new Mat();
    Mat fg_mask = new Mat();

    // create new Mixture of Gaussian BG model

    BackgroundSubtractorMOG MoG = new BackgroundSubtractorMOG();

    // try to open the first capture device (0); open() signals failure via
    // isOpened() rather than by throwing, so test that instead of catching

    cap.open(CAM_TO_USE);
    if (!cap.isOpened()) {
      System.out.println("No webcam attached");

      // otherwise try opening a video file

      cap.open("files/video.mp4");
      if (!cap.isOpened()) {
        System.out.println("No video file found");
      }
    }

    // if a video capture source is now open

    if (cap.isOpened()) {
      // create new window objects

      Imshow imsS = new Imshow("from video Source ... ");
      Imshow imsF = new Imshow("Foreground");

      boolean keepProcessing = true;

      while (keepProcessing) {
        // grab and retrieve the next frame from the video source

        cap.grab();
        cap.retrieve(frame);

        // if the frame is valid (not end of video for example)

        if (!(frame.empty())) {

          // add it to the background model with a learning rate of 0.1

          MoG.apply(frame, fg_mask, 0.1);

          // extract the foreground mask (255 = foreground / 0 = background),
          // and convert/expand it to a 3-channel version of the same

          Imgproc.cvtColor(fg_mask, fg_mask, Imgproc.COLOR_GRAY2BGR);

          // logically AND it with the original frame to extract colour
          // pixel only in the foreground regions

          Core.bitwise_and(frame, fg_mask, foreground);

          // display images with a delay of 40 ms (i.e. 1000 ms / 40 ms = 25 fps)

          imsS.showImage(frame);
          imsF.showImage(foreground);

          Thread.sleep(40);

        } else {
          keepProcessing = false;
        }
      }

    } else {
      System.out.println("error cannot open any capture source - exiting");
    }

    // close down the camera correctly

    cap.release();
  }
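
Note that the public BackgroundSubtractorMOG constructor used above exists only in the OpenCV 2.4 Java bindings; in OpenCV 3.x the original MOG model moved to the opencv_contrib bgsegm module. A sketch of the equivalent setup on 3.x and newer, assuming the built-in MOG2 variant is an acceptable substitute:

    // requires: org.opencv.video.BackgroundSubtractorMOG2, org.opencv.video.Video
    BackgroundSubtractorMOG2 MoG = Video.createBackgroundSubtractorMOG2();

    // the per-frame call inside the processing loop stays the same
    MoG.apply(frame, fg_mask, 0.1);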