public static BufferedImage bufferedImageFromRosMessageRaw(ColorModel colorModel,
    sensor_msgs.Image imageMessage) {
  int width = imageMessage.getWidth();
  int height = imageMessage.getHeight();
  // The message payload is backed by a byte array; wrap it directly instead of copying.
  byte[] payload = imageMessage.getData().array();
  BufferedImage ret = new BufferedImage(width, height, BufferedImage.TYPE_3BYTE_BGR);
  DataBuffer dataBuffer =
      new DataBufferByte(payload, payload.length, imageMessage.getData().arrayOffset());
  // Build a raster that matches the caller-supplied color model and copy it into the image.
  SampleModel sampleModel = colorModel.createCompatibleSampleModel(width, height);
  WritableRaster raster = Raster.createWritableRaster(sampleModel, dataBuffer, null);
  ret.setData(raster);
  return ret;
}
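The helper above expects the caller to supply a ColorModel that matches the message encoding. As a point of reference, the sketch below shows one way to build such a model for the common rgb8 encoding; the wrapper name bufferedImageFromRgb8 is purely illustrative and not part of the original code.

import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.BufferedImage;
import java.awt.image.ComponentColorModel;
import java.awt.image.DataBuffer;

// Hypothetical call site: builds a color model matching the rgb8 encoding and
// delegates to bufferedImageFromRosMessageRaw() above.
public static BufferedImage bufferedImageFromRgb8(sensor_msgs.Image imageMessage) {
  ComponentColorModel rgb8ColorModel = new ComponentColorModel(
      ColorSpace.getInstance(ColorSpace.CS_sRGB),
      new int[] { 8, 8, 8 },   // three 8-bit samples per pixel
      false,                   // no alpha channel
      false,                   // alpha not premultiplied
      Transparency.OPAQUE,
      DataBuffer.TYPE_BYTE);
  return bufferedImageFromRosMessageRaw(rgb8ColorModel, imageMessage);
}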
@Override
public Bitmap call(sensor_msgs.Image message) {
  Preconditions.checkArgument(message.getEncoding().equals("rgb8"));
  Bitmap bitmap = Bitmap.createBitmap(
      (int) message.getWidth(), (int) message.getHeight(), Bitmap.Config.ARGB_8888);
  // Walk the rgb8 payload pixel by pixel; each row is getStep() bytes wide and
  // each pixel occupies three consecutive bytes (R, G, B).
  ChannelBuffer data = message.getData();
  for (int x = 0; x < message.getWidth(); x++) {
    for (int y = 0; y < message.getHeight(); y++) {
      byte red = data.getByte((int) (y * message.getStep() + 3 * x));
      byte green = data.getByte((int) (y * message.getStep() + 3 * x + 1));
      byte blue = data.getByte((int) (y * message.getStep() + 3 * x + 2));
      bitmap.setPixel(x, y, Color.argb(255, red & 0xFF, green & 0xFF, blue & 0xFF));
    }
  }
  return bitmap;
}
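In the rosjava android_core pattern, a MessageCallable like this one is handed to a RosImageView, which subscribes to the image topic and draws the returned Bitmap. The following is a rough wiring sketch; the topic name /camera/image, the view id R.id.image, and the class name BitmapFromImage (assumed to hold the call() method above) are illustrative assumptions.

import org.ros.android.view.RosImageView;

// Inside the Activity's onCreate(), assuming the layout contains a RosImageView with id "image".
RosImageView<sensor_msgs.Image> imageView =
    (RosImageView<sensor_msgs.Image>) findViewById(R.id.image);
imageView.setTopicName("/camera/image");                    // illustrative topic name
imageView.setMessageType(sensor_msgs.Image._TYPE);
imageView.setMessageToBitmapCallable(new BitmapFromImage()); // the callable shown above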
@Override
protected Bitmap processFrame(VideoCapture capture) {
  // Per-stage timestamps for profiling; the description arrays label each stage
  // for the compressed and raw transport paths respectively.
  Time[] measureTime = new Time[9];
  String[] compDescStrings = { "Total processFrame", "Grab a new frame", "MatToBitmap",
      "Publish cameraInfo", "Create ImageMsg", "Compress image", "Transfer to Stream",
      "Image.SetData", "Publish Image", "Total seconds per frame" };
  String[] rawDescStrings = { "Total processFrame", "Grab a new frame", "MatToBitmap",
      "Publish cameraInfo", "Create ImageMsg", "Pixel to buffer", "Transfer to Stream",
      "Image.SetData", "Publish Image", "Total seconds per frame" };
  measureTime[0] = connectedNode.getCurrentTime();

  // Grab the new frame from OpenCV in the format selected in the UI.
  switch (MainActivity.viewMode) {
    case MainActivity.VIEW_MODE_GRAY:
      // capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
      capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_GREY_FRAME);
      // Imgproc.cvtColor(mGray, mRgba, Imgproc.COLOR_GRAY2RGBA, 4);
      break;
    case MainActivity.VIEW_MODE_RGBA:
      capture.retrieve(mRgba, Highgui.CV_CAP_ANDROID_COLOR_FRAME_RGBA);
      // Core.putText(mRgba, "OpenCV + Android", new Point(10, 100), 3, 2,
      //     new Scalar(255, 0, 0, 255), 3);
      break;
    case MainActivity.VIEW_MODE_CANNY:
      capture.retrieve(mGray, Highgui.CV_CAP_ANDROID_GREY_FRAME);
      Imgproc.Canny(mGray, mIntermediateMat, 80, 100);
      Imgproc.cvtColor(mIntermediateMat, mRgba, Imgproc.COLOR_GRAY2BGRA, 4);
      break;
  }

  Time currentTime = connectedNode.getCurrentTime();
  measureTime[1] = connectedNode.getCurrentTime();

  // Lazily allocate the Bitmap and, for the raw path, the pixel transfer buffer.
  if (bmp == null)
    bmp = Bitmap.createBitmap(mRgba.cols(), mRgba.rows(), Bitmap.Config.ARGB_8888);
  if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_NONE
      && bb == null) {
    Log.i(TAG, "Buffer 1");
    bb = ByteBuffer.allocate(bmp.getRowBytes() * bmp.getHeight());
    Log.i(TAG, "Buffer 2");
    bb.clear();
    Log.i(TAG, "Buffer 3");
  }

  try {
    Utils.matToBitmap(mRgba, bmp);
    measureTime[2] = connectedNode.getCurrentTime();

    // Publish the CameraInfo message alongside every frame.
    cameraInfo = cameraInfoPublisher.newMessage();
    cameraInfo.getHeader().setFrameId("camera");
    cameraInfo.getHeader().setStamp(currentTime);
    cameraInfo.setWidth(640);
    cameraInfo.setHeight(480);
    cameraInfoPublisher.publish(cameraInfo);
    measureTime[3] = connectedNode.getCurrentTime();

    if (MainActivity.imageCompression >= MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG) {
      // Compressed image path: encode the Bitmap as PNG or JPEG and publish a CompressedImage.
      sensor_msgs.CompressedImage image = imagePublisher.newMessage();
      if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
        image.setFormat("png");
      else if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_JPEG)
        image.setFormat("jpeg");
      image.getHeader().setStamp(currentTime);
      image.getHeader().setFrameId("camera");
      measureTime[4] = connectedNode.getCurrentTime();

      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
        bmp.compress(Bitmap.CompressFormat.PNG, 100, baos);
      else if (MainActivity.imageCompression == MainActivity.IMAGE_TRANSPORT_COMPRESSION_JPEG)
        bmp.compress(Bitmap.CompressFormat.JPEG, MainActivity.imageCompressionQuality, baos);
      measureTime[5] = connectedNode.getCurrentTime();

      stream.buffer().writeBytes(baos.toByteArray());
      measureTime[6] = connectedNode.getCurrentTime();
      image.setData(stream.buffer().copy());
      measureTime[7] = connectedNode.getCurrentTime();
      stream.buffer().clear();

      imagePublisher.publish(image);
      measureTime[8] = connectedNode.getCurrentTime();
    } else {
      // Raw image path: copy the Bitmap pixels straight into a sensor_msgs/Image.
      Log.i(TAG, "Raw image 1");
      sensor_msgs.Image rawImage = rawImagePublisher.newMessage();
      rawImage.getHeader().setStamp(currentTime);
      rawImage.getHeader().setFrameId("camera");
      rawImage.setEncoding("rgba8");
      rawImage.setWidth(bmp.getWidth());
      rawImage.setHeight(bmp.getHeight());
      // rgba8 uses four bytes per pixel, so the row stride is width * 4 bytes.
      rawImage.setStep(bmp.getRowBytes());
      measureTime[4] = connectedNode.getCurrentTime();
      Log.i(TAG, "Raw image 2");

      bmp.copyPixelsToBuffer(bb);
      measureTime[5] = connectedNode.getCurrentTime();
      Log.i(TAG, "Raw image 3");

      stream.buffer().writeBytes(bb.array());
      bb.clear();
      measureTime[6] = connectedNode.getCurrentTime();
      Log.i(TAG, "Raw image 4");

      rawImage.setData(stream.buffer().copy());
      stream.buffer().clear();
      measureTime[7] = connectedNode.getCurrentTime();
      Log.i(TAG, "Raw image 5");

      rawImagePublisher.publish(rawImage);
      measureTime[8] = connectedNode.getCurrentTime();
      Log.i(TAG, "Raw image 6");
    }

    // Accumulate per-stage timings (in milliseconds) and log the averages every numSamples frames.
    newTime = connectedNode.getCurrentTime();
    stats[9][counter] = (newTime.subtract(oldTime)).nsecs / 1000000.0;
    oldTime = newTime;
    for (int i = 1; i < 9; i++) {
      stats[i][counter] = (measureTime[i].subtract(measureTime[i - 1])).nsecs / 1000000.0;
    }
    stats[0][counter] = measureTime[8].subtract(measureTime[0]).nsecs / 1000000.0;
    counter++;

    if (counter == numSamples) {
      double[] sts = new double[10];
      Arrays.fill(sts, 0.0);
      for (int i = 0; i < 10; i++) {
        for (int j = 0; j < numSamples; j++)
          sts[i] += stats[i][j];
        sts[i] /= (double) numSamples;
        if (MainActivity.imageCompression >= MainActivity.IMAGE_TRANSPORT_COMPRESSION_PNG)
          Log.i(TAG, String.format("Mean time for %s:\t\t%4.2fms", compDescStrings[i], sts[i]));
        else
          Log.i(TAG, String.format("Mean time for %s:\t\t%4.2fms", rawDescStrings[i], sts[i]));
      }
      Log.i(TAG, "\n\n");
      counter = 0;
    }
    return bmp;
  } catch (Exception e) {
    Log.e(TAG, "Frame conversion and publishing throws an exception: " + e.getMessage());
    bmp.recycle();
    return null;
  }
}
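For completeness, the compressed path can be reversed on the subscriber side with BitmapFactory, since PNG and JPEG payloads are self-describing. The sketch below is not part of the original publisher code; the topic name camera/image/compressed is illustrative and the snippet is assumed to run inside a node's onStart(ConnectedNode connectedNode).

import org.jboss.netty.buffer.ChannelBuffer;
import org.ros.message.MessageListener;
import org.ros.node.topic.Subscriber;

import android.graphics.Bitmap;
import android.graphics.BitmapFactory;

// Subscriber-side sketch: decode the CompressedImage payload back into a Bitmap.
Subscriber<sensor_msgs.CompressedImage> subscriber = connectedNode.newSubscriber(
    "camera/image/compressed", sensor_msgs.CompressedImage._TYPE);
subscriber.addMessageListener(new MessageListener<sensor_msgs.CompressedImage>() {
  @Override
  public void onNewMessage(sensor_msgs.CompressedImage message) {
    ChannelBuffer buffer = message.getData();
    byte[] bytes = new byte[buffer.readableBytes()];
    buffer.getBytes(buffer.readerIndex(), bytes);
    // Both "png" and "jpeg" payloads are detected by BitmapFactory automatically.
    Bitmap decoded = BitmapFactory.decodeByteArray(bytes, 0, bytes.length);
    // ... hand the Bitmap off to the UI thread for display.
  }
});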