Code Example #1
 public void releaseCamera() {
   Log.i(TAG, "releaseCamera");
   synchronized (this) {
     if (mCamera != null) {
       mCamera.release();
       mCamera = null;
     }
   }
 }
Code Example #2
 public void surfaceDestroyed(SurfaceHolder holder) {
   Log.i(TAG, "surfaceDestroyed");
   synchronized (this) {
     // check and release under the same lock to avoid racing other callers
     if (mCamera != null) {
       mCamera.release();
       mCamera = null;
     }
   }
 }
Code Example #3
 // Stop-button method: stops the video
 private void stop() {
   if (begin) {
     try {
       Thread.sleep(500);
     } catch (Exception ex) {
       ex.printStackTrace();
     }
     video.release();
     begin = false;
   }
 }
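The fixed 500 ms sleep in stop() only guesses at when the capture thread has finished with video. A join-based variant removes the guess; this is a sketch only, and the captureThread field is an assumption that does not appear in the original snippet:

 // Stop-button method: signal the capture loop, then wait for it to exit
 private void stop() {
   if (begin) {
     begin = false;            // the capture loop checks this flag and returns
     try {
       captureThread.join();   // wait for the last frame to be processed
     } catch (InterruptedException ex) {
       Thread.currentThread().interrupt();
     }
     video.release();          // safe: no other thread is using the device now
   }
 }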
Code Example #4
 public void surfaceCreated(SurfaceHolder holder) {
   Log.i(TAG, "surfaceCreated");
   // create and publish mCamera under the same lock used by releaseCamera()
   synchronized (this) {
     mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
     if (mCamera.isOpened()) {
       (new Thread(this)).start();
     } else {
       mCamera.release();
       mCamera = null;
       Log.e(TAG, "Failed to open native camera");
     }
   }
 }
Code Example #5
File: CaptureImage.java Project: bmalla6920/Chequers
  /** Capture images and run color processing through here */
  public void capture() {
    VideoCapture camera = new VideoCapture();

    camera.set(12, -20); // property 12 is CV_CAP_PROP_SATURATION in the old C API; may not be necessary

    // CaptureImage image = new CaptureImage();

    camera.open(0); // required: the no-arg constructor does not open a device
    if (!camera.isOpened()) {
      System.out.println("Camera Error");

      // Determine whether to use System.exit(0) or return

    } else {
      System.out.println("Camera OK");
    }

    boolean success = camera.read(capturedFrame);
    if (success) {
      try {
        processWithContours(capturedFrame, processedFrame);
      } catch (Exception e) {
        System.out.println(e);
      }
      // image.processFrame(capturedFrame, processedFrame);
      // processedFrame should be CV_8UC3

      // image.findCaptured(processedFrame);

      // image.determineKings(capturedFrame);

      int bufferSize = processedFrame.channels() * processedFrame.cols() * processedFrame.rows();
      byte[] b = new byte[bufferSize];

      processedFrame.get(0, 0, b); // copy all the pixels out of the Mat
      // TYPE_3BYTE_BGR matches the byte layout of a CV_8UC3 BGR Mat
      img =
          new BufferedImage(
              processedFrame.cols(), processedFrame.rows(), BufferedImage.TYPE_3BYTE_BGR);
      img.getRaster().setDataElements(0, 0, processedFrame.cols(), processedFrame.rows(), b);

      Highgui.imwrite("camera.jpg", processedFrame);
      System.out.println("Success");
    } else System.out.println("Unable to capture image");

    camera.release();
  }
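Beyond writing camera.jpg, the img field built above can be inspected directly. A minimal Swing viewer, as a sketch only: showFrame is a hypothetical helper, not part of the project, and it uses only standard Swing/AWT classes:

  // throwaway viewer for a captured frame (standard Swing/AWT only)
  private static void showFrame(java.awt.image.BufferedImage image) {
    javax.swing.JFrame frame = new javax.swing.JFrame("capture");
    frame.setDefaultCloseOperation(javax.swing.JFrame.DISPOSE_ON_CLOSE);
    frame.add(new javax.swing.JLabel(new javax.swing.ImageIcon(image)));
    frame.pack();                // size the window to the image
    frame.setVisible(true);
  }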
Code Example #6
 public boolean openCamera() {
   Log.i(TAG, "openCamera");
   synchronized (this) {
     releaseCamera();
     mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
     if (!mCamera.isOpened()) {
       mCamera.release();
       mCamera = null;
       Log.e(TAG, "Failed to open native camera");
       return false;
     }
   }
   return true;
 }
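Examples #1, #2, #4 and #6 all guard mCamera with synchronized (this); that only pays off if the capture loop started in surfaceCreated takes the same lock. A minimal sketch of such a run() method, with field names taken from the snippets above and the frame-drawing step elided:

 public void run() {
   Mat frame = new Mat();
   while (true) {
     synchronized (this) {
       // stop if the camera was released or the grab fails (end of stream)
       if (mCamera == null || !mCamera.grab()) break;
       mCamera.retrieve(frame);
     }
     // ... convert `frame` and draw it on the SurfaceHolder ...
   }
   frame.release();
 }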
Code Example #7
File: backgroundModel.java Project: DanielYTL/MC
  // imports needed by this example (OpenCV 2.4 Java API)
  import org.opencv.core.Core;
  import org.opencv.core.Mat;
  import org.opencv.highgui.VideoCapture;
  import org.opencv.imgproc.Imgproc;
  import org.opencv.video.BackgroundSubtractorMOG;
  // Imshow is a small third-party display helper; its import path varies by project

  public static void main(String[] args) throws InterruptedException {

    // load the Core OpenCV library by name

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // create video capture device object

    VideoCapture cap = new VideoCapture();

    // try to use the hardware device if present

    int CAM_TO_USE = 0;

    // create new image objects

    Mat frame = new Mat();
    Mat foreground = new Mat();
    Mat fg_mask = new Mat();

    // create new Mixture of Gaussian BG model

    BackgroundSubtractorMOG MoG = new BackgroundSubtractorMOG();

    // try to open first capture device (0)

    // open() signals failure via isOpened() rather than by throwing,
    // so test the capture explicitly instead of using try/catch

    cap.open(CAM_TO_USE);
    if (!cap.isOpened()) {
      System.out.println("No webcam attached");

      // otherwise try opening a video file

      cap.open("files/video.mp4");
      if (!cap.isOpened()) {
        System.out.println("No video file found");
      }
    }

    // if a video capture source is now open

    if (cap.isOpened()) {
      // create new window objects

      Imshow imsS = new Imshow("from video Source ... ");
      Imshow imsF = new Imshow("Foreground");

      boolean keepProcessing = true;

      while (keepProcessing) {
        // grab and return the next frame from video source

        cap.grab();
        cap.retrieve(frame);

        // if the frame is valid (not end of video for example)

        if (!(frame.empty())) {

          // add it to the background model with a learning rate of 0.1

          MoG.apply(frame, fg_mask, 0.1);

          // extract the foreground mask (non-zero = foreground, 0 = background),
          // and convert/expand it to a 3-channel version of the same

          Imgproc.cvtColor(fg_mask, fg_mask, Imgproc.COLOR_GRAY2BGR);

          // logically AND it with the original frame to extract colour
          // pixel only in the foreground regions

          Core.bitwise_and(frame, fg_mask, foreground);

          // display images with a delay of 40 ms (i.e. 1000 ms / 25 fps)

          imsS.showImage(frame);
          imsF.showImage(foreground);

          Thread.sleep(40);

        } else {
          keepProcessing = false;
        }
      }

    } else {
      System.out.println("error cannot open any capture source - exiting");
    }

    // close down the camera correctly

    cap.release();
  }
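A side note on the loop body above: when only one stream is read, the grab()/retrieve() pair can be collapsed into a single read() call, which also reports end-of-stream via its return value. A sketch of the equivalent loop:

      while (keepProcessing) {
        // read() = grab() + retrieve(); returns false at end of stream
        if (!cap.read(frame) || frame.empty()) {
          keepProcessing = false;
        } else {
          MoG.apply(frame, fg_mask, 0.1);
          // ... remainder of the processing as above ...
        }
      }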