public void setupCamera(int width, int height) {
    Log.i(TAG, "setupCamera(" + width + ", " + height + ")");
    synchronized (this) {
      if (mCamera != null && mCamera.isOpened()) {
        List<Size> sizes = mCamera.getSupportedPreviewSizes();
        int mFrameWidth = width;
        int mFrameHeight = height;

        // selecting optimal camera preview size
        {
          double minDiff = Double.MAX_VALUE;
          for (Size size : sizes) {
            if (Math.abs(size.height - height) < minDiff) {
              mFrameWidth = (int) size.width;
              mFrameHeight = (int) size.height;
              minDiff = Math.abs(size.height - height);
            }
          }
        }

        mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
        mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
      }
    }
  }
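The loop above picks the supported size whose height is closest to the requested height and takes that size's width along with it. If both dimensions should be matched, a helper like the following sketch (an assumption, not part of the original class) minimizes the combined difference; it uses the same org.opencv.core.Size type returned by getSupportedPreviewSizes():

  // Hypothetical helper: pick the supported preview size closest to (width, height)
  // by summed absolute difference in both dimensions.
  private static Size bestPreviewSize(List<Size> sizes, int width, int height) {
    Size best = sizes.get(0);
    double minDiff = Double.MAX_VALUE;
    for (Size size : sizes) {
      double diff = Math.abs(size.width - width) + Math.abs(size.height - height);
      if (diff < minDiff) {
        best = size;
        minDiff = diff;
      }
    }
    return best;
  }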
  private void processFrameForMarkersDebug(VideoCapture capture) {
    ArrayList<MatOfPoint> components = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    // Get original image.
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    // Get gray scale image.
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    // Get image segment to detect marker.
    Mat imgSegmentMat = cloneMarkerImageSegment(mGray);
    Mat thresholdedImgMat = new Mat(imgSegmentMat.size(), imgSegmentMat.type());
    applyThresholdOnImage(imgSegmentMat, thresholdedImgMat);
    copyThresholdedImageToRgbImgMat(thresholdedImgMat, mRgba);

    Scalar contourColor = new Scalar(0, 0, 255);
    Scalar codesColor = new Scalar(255, 0, 0, 255);

    displayMarkersDebug(thresholdedImgMat, contourColor, codesColor);
    // displayThresholds(mRgba, codesColor, localThresholds);
    displayRectOnImageSegment(mRgba, false);

    if (components != null) components.clear();
    if (hierarchy != null) hierarchy.release();
    components = null;
    hierarchy = null;
  }
  public void surfaceChanged(SurfaceHolder _holder, int format, int width, int height) {
    Log.i(TAG, "surfaceCreated");
    synchronized (this) {
      if (mCamera != null && mCamera.isOpened()) {
        Log.i(TAG, "before mCamera.getSupportedPreviewSizes()");
        List<Size> sizes = mCamera.getSupportedPreviewSizes();
        Log.i(TAG, "after mCamera.getSupportedPreviewSizes()");
        int mFrameWidth = width;
        int mFrameHeight = height;

        // selecting optimal camera preview size
        {
          double minDiff = Double.MAX_VALUE;
          for (Size size : sizes) {
            if (Math.abs(size.height - height) < minDiff) {
              mFrameWidth = (int) size.width;
              mFrameHeight = (int) size.height;
              minDiff = Math.abs(size.height - height);
            }
          }
        }

        mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
        mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
      }
    }
  }
 public void surfaceCreated(SurfaceHolder holder) {
   Log.i(TAG, "surfaceCreated");
   mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
   if (mCamera.isOpened()) {
     (new Thread(this)).start();
   } else {
     mCamera.release();
     mCamera = null;
     Log.e(TAG, "Failed to open native camera");
   }
 }
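These SurfaceHolder callbacks only fire if the view registers itself with its holder. A minimal sketch of the constructor wiring they assume (the class name is hypothetical; mHolder is the field used by the run() method elsewhere on this page):

 public CameraView(Context context) { // hypothetical view implementing SurfaceHolder.Callback, Runnable
   super(context);
   mHolder = getHolder();
   mHolder.addCallback(this); // so surfaceCreated/surfaceChanged/surfaceDestroyed are invoked
 }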
Example #5
File: VisionTest.java  Project: vkee/maslab
  public static void main(String args[]) {
    // Load the OpenCV library
    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // Setup the camera
    VideoCapture camera = new VideoCapture();
    camera.open(0);

    // Create GUI windows to display camera output and OpenCV output
    int width = 320;
    int height = 240;
    camera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, width);
    camera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, height);
    JLabel cameraPane = createWindow("Camera output", width, height);
    JLabel opencvPane = createWindow("OpenCV output", width, height);

    // Main loop
    Mat rawImage = new Mat();
    while (true) {
      // Wait until the camera has a new frame
      while (!camera.read(rawImage)) {
        try {
          Thread.sleep(1);
        } catch (InterruptedException e) {
          e.printStackTrace();
        }
      }

      // Process the image however you like
      Mat binary = Detection.detectHueRange(rawImage);
      org.opencv.core.Point center = Detection.nextCenter(binary, width / 2, height / 2, 5);
      Mat processedImage = Detection.convertC(binary);
      System.out.println(center.x);
      System.out.println(center.y);

      //            Mat lines = Detection.detectEdges(rawImage, 80, 3);
      //            List<org.opencv.core.Point> edges = Detection.findWallEdges(lines, rawImage, 25);
      //            Detection.drawLines(binary, edges);
      //            Mat processedImage = Detection.convertC(binary);
      Mat edges = Detection.contourImage(rawImage, 150, 3);
      Detection.hueEdges(rawImage, edges);
      Mat edgesC = Detection.convertC(edges);
      //            List<org.opencv.core.Point> lines = Detection.hueLines(edges);
      //            Detection.drawLines(binary, lines);
      //            Mat processedImage = Detection.convertC(binary);

      // Update the GUI windows
      updateWindow(cameraPane, rawImage);
      //            updateWindow(opencvPane, processedImage);
      updateWindow(opencvPane, edgesC);
    }
  }
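createWindow and updateWindow are not shown in this example. A plausible pair of implementations, assuming Swing JLabels updated with JPEG-encoded frames (imports: javax.swing.*, org.opencv.core.MatOfByte, org.opencv.highgui.Highgui), could look like this:

  // Hypothetical helpers assumed by main() above.
  private static JLabel createWindow(String name, int width, int height) {
    JFrame frame = new JFrame(name);
    JLabel pane = new JLabel();
    frame.setContentPane(pane);
    frame.setSize(width, height);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setVisible(true);
    return pane;
  }

  private static void updateWindow(JLabel pane, Mat mat) {
    // Encode the frame to an in-memory JPEG and display it as the label's icon.
    MatOfByte buf = new MatOfByte();
    Highgui.imencode(".jpg", mat, buf);
    pane.setIcon(new ImageIcon(buf.toArray()));
    pane.repaint();
  }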
 public boolean openCamera() {
   Log.i(TAG, "openCamera");
   synchronized (this) {
     releaseCamera();
     mCamera = new VideoCapture(Highgui.CV_CAP_ANDROID);
     if (!mCamera.isOpened()) {
       mCamera.release();
       mCamera = null;
       Log.e(TAG, "Failed to open native camera");
       return false;
     }
   }
   return true;
 }
  public void run() {
    Log.i(TAG, "Starting processing thread");
    while (true) {
      Bitmap bmp = null;

      synchronized (this) {
        if (mCamera == null) break;

        if (!mCamera.grab()) {
          Log.e(TAG, "mCamera.grab() failed");
          break;
        }

        bmp = processFrame(mCamera);
      }

      if (bmp != null) {
        Canvas canvas = mHolder.lockCanvas();
        if (canvas != null) {
          canvas.drawBitmap(
              bmp,
              (canvas.getWidth() - bmp.getWidth()) / 2,
              (canvas.getHeight() - bmp.getHeight()) / 2,
              null);
          mHolder.unlockCanvasAndPost(canvas);
        }
        //                bmp.recycle();
      }
    }

    Log.i(TAG, "Finishing processing thread");
  }
 public void surfaceDestroyed(SurfaceHolder holder) {
   Log.i(TAG, "surfaceDestroyed");
   if (mCamera != null) {
     synchronized (this) {
       mCamera.release();
       mCamera = null;
     }
   }
 }
 public void releaseCamera() {
   Log.i(TAG, "releaseCamera");
   synchronized (this) {
     if (mCamera != null) {
       mCamera.release();
       mCamera = null;
     }
   }
 }
 // Stop-button handler: stops the video
 private void stop() {
   if (begin) {
     try {
       Thread.sleep(500);
     } catch (Exception ex) {
       ex.printStackTrace();
     }
     video.release();
     begin = false;
   }
 }
Example #11
  /** Capture images and run color processing through here */
  public void capture() {
    VideoCapture camera = new VideoCapture();

    camera.set(Highgui.CV_CAP_PROP_SATURATION, -20); // property 12 is saturation (contrast is 11); may not be necessary

    // CaptureImage image = new CaptureImage();

    camera.open(0); // required: the no-argument constructor does not open a device
    if (!camera.isOpened()) {
      System.out.println("Camera Error");

      // Bail out early; whether to use System.exit(0) or return depends on the caller
      return;

    } else {
      System.out.println("Camera OK");
    }

    boolean success = camera.read(capturedFrame);
    if (success) {
      try {
        processWithContours(capturedFrame, processedFrame);
      } catch (Exception e) {
        System.out.println(e);
      }
      // image.processFrame(capturedFrame, processedFrame);
      // processedFrame should be CV_8UC3

      // image.findCaptured(processedFrame);

      // image.determineKings(capturedFrame);

      int bufferSize = processedFrame.channels() * processedFrame.cols() * processedFrame.rows();
      byte[] b = new byte[bufferSize];

      processedFrame.get(0, 0, b); // get all the pixels
      // This might need to be BufferedImage.TYPE_INT_ARGB
      img =
          new BufferedImage(
              processedFrame.cols(), processedFrame.rows(), BufferedImage.TYPE_INT_RGB);
      int width = (int) camera.get(Highgui.CV_CAP_PROP_FRAME_WIDTH);
      int height = (int) camera.get(Highgui.CV_CAP_PROP_FRAME_HEIGHT);
      // img.getRaster().setDataElements(0, 0, width, height, b);
      byte[] a = new byte[bufferSize];
      System.arraycopy(b, 0, a, 0, bufferSize);

      Highgui.imwrite("camera.jpg", processedFrame);
      System.out.println("Success");
    } else System.out.println("Unable to capture image");

    camera.release();
  }
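The commented questions about BufferedImage types above have a standard answer: a CV_8UC3 Mat stores pixels as interleaved BGR bytes, which maps to BufferedImage.TYPE_3BYTE_BGR, not TYPE_INT_RGB or TYPE_INT_ARGB. A minimal conversion sketch:

  // Convert a CV_8UC1 or CV_8UC3 Mat to a BufferedImage by copying raw bytes.
  private static BufferedImage matToBufferedImage(Mat m) {
    int type = (m.channels() == 1)
        ? BufferedImage.TYPE_BYTE_GRAY
        : BufferedImage.TYPE_3BYTE_BGR; // matches OpenCV's interleaved BGR layout
    byte[] data = new byte[m.channels() * m.cols() * m.rows()];
    m.get(0, 0, data);
    BufferedImage img = new BufferedImage(m.cols(), m.rows(), type);
    img.getRaster().setDataElements(0, 0, m.cols(), m.rows(), data);
    return img;
  }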
  // Start-button handler: starts the video
  private void start() {
    if (!begin) {

      video = new VideoCapture(0);

      if (video.isOpened()) {
        thread = new CaptureThread();
        thread.start();
        begin = true;
        firstFrame = true;
      }
    }
  }
  private void processFrameForMarkersFull(VideoCapture capture, DtouchMarker marker) {
    ArrayList<MatOfPoint> components = new ArrayList<MatOfPoint>();
    Mat hierarchy = new Mat();

    // Get original image.
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    // Get gray scale image.
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
    // Get image segment to detect marker.
    markerPosition = calculateImageSegmentArea(mGray);
    Mat imgSegmentMat = cloneMarkerImageSegment(mGray);
    // apply threshold.
    Mat thresholdedImgMat = new Mat(imgSegmentMat.size(), imgSegmentMat.type());
    applyThresholdOnImage(imgSegmentMat, thresholdedImgMat);
    imgSegmentMat.release();
    // find markers.
    boolean markerFound = findMarkers(thresholdedImgMat, marker, components, hierarchy);
    thresholdedImgMat.release();
    // Marker detected.
    if (markerFound) {
      setMarkerDetected(true);
      // if marker is found then copy the marker image segment.
      mMarkerImage = cloneMarkerImageSegment(mRgba);
      // display codes on the original image.
      // displayMarkerCodes(mRgba, markers);
      // display rect with indication that a marker is identified.
      displayRectOnImageSegment(mRgba, true);
      // display marker image
      displayMarkerImage(mMarkerImage, mRgba);
    } else displayRectOnImageSegment(mRgba, false);

    if (components != null) components.clear();
    if (hierarchy != null) hierarchy.release();
    components = null;
    hierarchy = null;
  }
  private Bitmap displayDetectedMarker(VideoCapture capture, Mat markerImage) {
    // Get original image.
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    displayRectOnImageSegment(mRgba, true);
    displayMarkerImage(mMarkerImage, mRgba);

    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
    try {
      Utils.matToBitmap(mRgba, bmp);
      return bmp;
    } catch (Exception e) {
      Log.e("TWMarkerSurfaceView", "Utils.matToBitmap() throws an exception: " + e.getMessage());
      bmp.recycle();
      return null;
    }
  }
  public void setupCamera(int width, int height) {
    Log.i(TAG, "setupCamera out(" + width + ", " + height + ")");
    synchronized (this) {
      if (mCamera != null && mCamera.isOpened()) {
        List<Size> sizes = mCamera.getSupportedPreviewSizes();
        int mFrameWidth = width;
        int mFrameHeight = height;

        // selecting optimal camera preview size
        {
          double minDiff = Double.MAX_VALUE;
          for (Size size : sizes) {
            if ((Math.abs(size.height - height) + Math.abs(size.width - width)) < minDiff) {
              mFrameWidth = (int) size.width;
              mFrameHeight = (int) size.height;
              minDiff = Math.abs(size.height - height) + Math.abs(size.width - width);
            }
          }
        }

        mCamera.set(
            Highgui.CV_CAP_PROP_ANDROID_ANTIBANDING, Highgui.CV_CAP_ANDROID_ANTIBANDING_OFF);
        mCamera.set(Highgui.CV_CAP_PROP_ANDROID_FLASH_MODE, Highgui.CV_CAP_ANDROID_FLASH_MODE_OFF);
        mCamera.set(
            Highgui.CV_CAP_PROP_ANDROID_FOCUS_MODE,
            Highgui.CV_CAP_ANDROID_FOCUS_MODE_CONTINUOUS_VIDEO);
        mCamera.set(
            Highgui.CV_CAP_PROP_ANDROID_WHITE_BALANCE,
            Highgui.CV_CAP_ANDROID_WHITE_BALANCE_FLUORESCENT);
        //                mCamera.set(Highgui.CV_CAP_PROP_IOS_DEVICE_EXPOSURE,
        //                Log.i(TAG, "setupCamera 6: " +
        // mCamera.get(Highgui.CV_CAP_PROP_IOS_DEVICE_EXPOSURE));

        mCamera.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, mFrameWidth);
        mCamera.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, mFrameHeight);
      }
    }
  }
  public void run() {
    Log.i(TAG, "Starting processing thread");
    while (true) {
      Bitmap bmp = null;

      synchronized (this) {
        if (mCamera == null) break;

        if (!mCamera.grab()) {
          Log.e(TAG, "mCamera.grab() failed");
          break;
        }

        bmp = processFrame(mCamera);
      }

      if (bmp != null) {
        Canvas canvas = mHolder.lockCanvas();
        if (canvas != null) {
          canvas.drawBitmap(
              bmp,
              (canvas.getWidth() - bmp.getWidth()) / 2,
              (canvas.getHeight() - bmp.getHeight()) / 2,
              null);
          /*
          // Change to support portrait view
          Matrix matrix = new Matrix();
          matrix.preTranslate((canvas.getWidth() - bmp.getWidth()) / 2,
                              (canvas.getHeight() - bmp.getHeight()) / 2);
          if (getResources().getConfiguration().orientation == Configuration.ORIENTATION_PORTRAIT)
            matrix.postRotate(90f, canvas.getWidth() / 2, canvas.getHeight() / 2);
          canvas.drawBitmap(bmp, matrix, new Paint());
          */
          mHolder.unlockCanvasAndPost(canvas);
        }
        bmp.recycle();
      }
    }

    Log.i(TAG, "Finishing processing thread");
  }
Example #17
  /* (non-Javadoc)
   * @see java.lang.Runnable#run()
   */
  @Override
  public void run() {
    if (MODE.equals("VIDEO")) {
      Mat capturedImage = new Mat();
      VideoCapture vc = new VideoCapture(DEVICE);
      if (!vc.isOpened()) {
        System.out.println("Capture Failed!");
        return;
      }
      System.out.println("Device " + DEVICE + " opened");
      // set captured resolution
      vc.set(Highgui.CV_CAP_PROP_FRAME_WIDTH, 640);
      vc.set(Highgui.CV_CAP_PROP_FRAME_HEIGHT, 480);

      // Manually set exposure
      vc.set(15, -11);
      while (true) {
        // read() returns false on failure; the Mat reference itself is never null
        if (vc.read(capturedImage) && !capturedImage.empty()) {
          // flip the image to compensate for camera orientation
          Core.flip(capturedImage, capturedImage, -1);
          capturedImage.copyTo(finalDisplayImg);
          parseImage(capturedImage);
        }
      }
    } else { // STILL IMAGE
      Mat capturedImage = Highgui.imread(IMAGE_FILEPATH);
      while (true) {
        if (needUpdate) {
          capturedImage.copyTo(finalDisplayImg);
          parseImage(capturedImage);
          needUpdate = false;
        }
      }
    }
  }
Example #18
  public static void main(String[] args) throws InterruptedException {

    // load the Core OpenCV library by name

    System.loadLibrary(Core.NATIVE_LIBRARY_NAME);

    // create video capture device object

    VideoCapture cap = new VideoCapture();

    // try to use the hardware device if present

    int CAM_TO_USE = 0;

    // create new image objects

    Mat frame = new Mat();
    Mat foreground = new Mat();
    Mat fg_mask = new Mat();

    // create new Mixture of Gaussian BG model

    BackgroundSubtractorMOG MoG = new BackgroundSubtractorMOG();

    // try to open first capture device (0)

    try {
      cap.open(CAM_TO_USE);
    } catch (Exception e1) {
      System.out.println("No webcam attached");

      // otherwise try opening a video file

      try {
        cap.open("files/video.mp4");
      } catch (Exception e2) {
        System.out.println("No video file found");
      }
    }

    // if a video capture source is now open

    if (cap.isOpened()) {
      // create new window objects

      Imshow imsS = new Imshow("from video Source ... ");
      Imshow imsF = new Imshow("Foreground");

      boolean keepProcessing = true;

      while (keepProcessing) {
        // grab and return the next frame from video source

        cap.grab();
        cap.retrieve(frame);

        // if the frame is valid (not end of video for example)

        if (!(frame.empty())) {

          // add it to the background model with a learning rate of 0.1

          MoG.apply(frame, fg_mask, 0.1);

          // extract the foreground mask (255 = foreground / 0 = background),
          // and convert/expand it to a 3-channel version of the same

          Imgproc.cvtColor(fg_mask, fg_mask, Imgproc.COLOR_GRAY2BGR);

          // logically AND it with the original frame to extract colour
          // pixel only in the foreground regions

          Core.bitwise_and(frame, fg_mask, foreground);

          // display image with a delay of 40ms (i.e. 1000 ms / 25 = 25 fps)

          imsS.showImage(frame);
          imsF.showImage(foreground);

          Thread.sleep(40);

        } else {
          keepProcessing = false;
        }
      }

    } else {
      System.out.println("error cannot open any capture source - exiting");
    }

    // close down the camera correctly

    cap.release();
  }
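The no-argument BackgroundSubtractorMOG constructor uses the default model parameters; only the per-frame learning rate (0.1 above) is being chosen explicitly. If the defaults fight the scene, the OpenCV 2.4 Java bindings also expose a parameterized constructor; a sketch with illustrative (not tuned) values:

  // history, number of Gaussian mixtures, background ratio, noise sigma (0 = default)
  BackgroundSubtractorMOG MoG = new BackgroundSubtractorMOG(200, 5, 0.7, 0);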
Example #19
File: FdView.java  Project: nagyist/XFace
  @Override
  protected Bitmap processFrame(VideoCapture capture) {
    capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
    capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);

    if (mAbsoluteFaceSize == 0) {
      int height = mGray.rows();
      if (Math.round(height * mRelativeFaceSize) > 0) {
        mAbsoluteFaceSize = Math.round(height * mRelativeFaceSize);
      }
      mNativeDetector.setMinFaceSize(mAbsoluteFaceSize);
    }

    MatOfRect faces = new MatOfRect();

    if (mDetectorType == JAVA_DETECTOR) {
      if (mJavaDetector != null)
        mJavaDetector.detectMultiScale(
            mGray,
            faces,
            1.1,
            2,
            2, // TODO: objdetect.CV_HAAR_SCALE_IMAGE
            new Size(mAbsoluteFaceSize, mAbsoluteFaceSize),
            new Size());

      if (mZoomCorner == null || mZoomWindow == null) CreateAuxiliaryMats();

      Rect[] facesArray = faces.toArray();

      for (int i = 0; i < facesArray.length; i++) {
        Rect r = facesArray[i];
        Core.rectangle(mGray, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);
        Core.rectangle(mRgba, r.tl(), r.br(), new Scalar(0, 255, 0, 255), 3);

        eyearea =
            new Rect(
                r.x + r.width / 8,
                (int) (r.y + (r.height / 4.5)),
                r.width - 2 * r.width / 8,
                (int) (r.height / 3.0));
        Core.rectangle(mRgba, eyearea.tl(), eyearea.br(), new Scalar(255, 0, 0, 255), 2);
        Rect eyearea_right =
            new Rect(
                r.x + r.width / 16,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2,
                (int) (r.height / 3.0));
        Rect eyearea_left =
            new Rect(
                r.x + r.width / 16 + (r.width - 2 * r.width / 16) / 2,
                (int) (r.y + (r.height / 4.5)),
                (r.width - 2 * r.width / 16) / 2,
                (int) (r.height / 3.0));
        Core.rectangle(mRgba, eyearea_left.tl(), eyearea_left.br(), new Scalar(255, 0, 0, 255), 2);
        Core.rectangle(
            mRgba, eyearea_right.tl(), eyearea_right.br(), new Scalar(255, 0, 0, 255), 2);

        if (learn_frames < 5) {
          teplateR = get_template(mCascadeER, eyearea_right, 24);
          teplateL = get_template(mCascadeEL, eyearea_left, 24);
          learn_frames++;
        } else {

          match_value = match_eye(eyearea_right, teplateR, FdActivity.method);
          // note: the left-eye score overwrites the right-eye score here
          match_value = match_eye(eyearea_left, teplateL, FdActivity.method);
        }
        Imgproc.resize(mRgba.submat(eyearea_left), mZoomWindow2, mZoomWindow2.size());
        Imgproc.resize(mRgba.submat(eyearea_right), mZoomWindow, mZoomWindow.size());
      }
    } else if (mDetectorType == NATIVE_DETECTOR) {
      if (mNativeDetector != null) mNativeDetector.detect(mGray, faces);
    } else {
      Log.e(TAG, "Detection method is not selected!");
    }

    Rect[] facesArray = faces.toArray();
    for (int i = 0; i < facesArray.length; i++)
      Core.rectangle(mRgba, facesArray[i].tl(), facesArray[i].br(), FACE_RECT_COLOR, 3);

    Bitmap bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    try {
      Utils.matToBitmap(mRgba, bmp);
    } catch (Exception e) {
      Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
      bmp.recycle();
      bmp = null;
    }

    return bmp;
  }
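mJavaDetector is assumed to be a CascadeClassifier loaded before frames arrive. A minimal initialization sketch (mCascadeFile is a hypothetical java.io.File; on Android the cascade XML is usually copied out of raw resources to such a file first):

  CascadeClassifier detector = new CascadeClassifier(mCascadeFile.getAbsolutePath());
  if (detector.empty()) {
    Log.e(TAG, "Failed to load cascade classifier");
    detector = null;
  }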
Example #20
  @Override
  protected Bitmap processFrame(VideoCapture capture) {
    Time[] measureTime = new Time[9];
    String[] compDescStrings = {
      "Total processFrame",
      "Grab a new frame",
      "MatToBitmap",
      "Publish cameraInfo",
      "Create ImageMsg",
      "Compress image",
      "Transfer to Stream",
      "Image.SetData",
      "Publish Image",
      "Total econds per frame"
    };
    String[] rawDescStrings = {
      "Total processFrame",
      "Grab a new frame",
      "MatToBitmap",
      "Publish cameraInfo",
      "Create ImageMsg",
      "Pixel to buffer",
      "Transfer to Stream",
      "Image.SetData",
      "Publish Image",
      "Total seconds per frame"
    };

    measureTime[0] = connectedNode.getCurrentTime();

    switch (MainActivity.viewMode) {
      case MainActivity.VIEW_MODE_GRAY:
        //	            capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
        capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_GREY_FRAME);
        //	            Imgproc.cvtColor(mGray, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
        break;
      case MainActivity.VIEW_MODE_RGBA:
        capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
        //            Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3, 2, new Scalar(255, 0, 0, 255), 3);
        break;
      case MainActivity.VIEW_MODE_CANNY:
        capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
        Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
        Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
        break;
    }
    Time currentTime = connectedNode.getCurrentTime();

    measureTime[1] = connectedNode.getCurrentTime();

    if (bmp == null) bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);

    if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_NONE
        && bb == null) {
      Log.i(TAG, "Buffer 1");
      bb = ByteBuffer.allocate(bmp.getRowBytes() * bmp.getHeight());
      Log.i(TAG, "Buffer 2");
      bb.clear();
      Log.i(TAG, "Buffer 3");
    }
    try {
      Utils.matToBitmap(mRgba, bmp);
      measureTime[2] = connectedNode.getCurrentTime();

      cameraInfo = cameraInfoPublisher.newMessage();
      cameraInfo.getHeader().setFrameId("camera");
      cameraInfo.getHeader().setStamp(currentTime);
      cameraInfo.setWidth(640);
      cameraInfo.setHeight(480);
      cameraInfoPublisher.publish(cameraInfo);
      measureTime[3] = connectedNode.getCurrentTime();

      if (MainActivity.imageCompression >= MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG) {
        // Compressed image

        sensor_msgs.CompressedImage image = imagePublisher.newMessage();
        if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
          image.setFormat("png");
        else if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_JPEG)
          image.setFormat("jpeg");
        image.getHeader().setStamp(currentTime);
        image.getHeader().setFrameId("camera");
        measureTime[4] = connectedNode.getCurrentTime();

        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
          bmp.compress(Bitmap.CompressFormat.PNG, 100, baos);
        else if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_JPEG)
          bmp.compress(Bitmap.CompressFormat.JPEG, MainActivity.imageCompressionQuality, baos);
        measureTime[5] = connectedNode.getCurrentTime();

        stream.buffer().writeBytes(baos.toByteArray());
        measureTime[6] = connectedNode.getCurrentTime();

        image.setData(stream.buffer().copy());
        measureTime[7] = connectedNode.getCurrentTime();

        stream.buffer().clear();
        imagePublisher.publish(image);
        measureTime[8] = connectedNode.getCurrentTime();
      } else {
        // Raw image

        Log.i(TAG, "Raw image 1");
        sensor_msgs.Image rawImage = rawImagePublisher.newMessage();
        rawImage.getHeader().setStamp(currentTime);
        rawImage.getHeader().setFrameId("camera");
        rawImage.setEncoding("rgba8");
        rawImage.setWidth(bmp.getWidth());
        rawImage.setHeight(bmp.getHeight());
        rawImage.setStep(bmp.getRowBytes()); // bytes per row: width * 4 for rgba8
        measureTime[4] = connectedNode.getCurrentTime();

        Log.i(TAG, "Raw image 2");

        bmp.copyPixelsToBuffer(bb);
        measureTime[5] = connectedNode.getCurrentTime();

        Log.i(TAG, "Raw image 3");

        stream.buffer().writeBytes(bb.array());
        bb.clear();
        measureTime[6] = connectedNode.getCurrentTime();

        Log.i(TAG, "Raw image 4");

        rawImage.setData(stream.buffer().copy());
        stream.buffer().clear();
        measureTime[7] = connectedNode.getCurrentTime();

        Log.i(TAG, "Raw image 5");

        rawImagePublisher.publish(rawImage);
        measureTime[8] = connectedNode.getCurrentTime();
        Log.i(TAG, "Raw image 6");
      }

      newTime = connectedNode.getCurrentTime();
      stats[9][counter] = (newTime.subtract(oldTime)).nsecs / 1000000.0;
      oldTime = newTime;

      for (int i = 1; i < 9; i++) {
        stats[i][counter] = (measureTime[i].subtract(measureTime[i - 1])).nsecs / 1000000.0;
      }

      stats[0][counter] = measureTime[8].subtract(measureTime[0]).nsecs / 1000000.0;

      counter++;
      if (counter == numSamples) {
        double[] sts = new double[10];
        Arrays.fill(sts, 0.0);

        for (int i = 0; i < 10; i++) {
          for (int j = 0; j < numSamples; j++) sts[i] += stats[i][j];

          sts[i] /= (double) numSamples;

          if (MainActivity.imageCompression >= MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
            Log.i(TAG, String.format("Mean time for %s:\t\t%4.2fms", compDescStrings[i], sts[i]));
          else Log.i(TAG, String.format("Mean time for %s:\t\t%4.2fms", rawDescStrings[i], sts[i]));
        }
        Log.i(TAG, "\n\n");
        counter = 0;
      }

      return bmp;
    } catch (Exception e) {
      Log.e(TAG, "Frame conversion and publishing throws an exception: " + e.getMessage());
      bmp.recycle();
      return null;
    }
  }