/**
 * Checks the frame for correctness, using GL to check RGB values.
 *
 * @return true if the frame looks good
 */
private boolean checkSurfaceFrame(int frameIndex) {
    ByteBuffer pixelBuf = ByteBuffer.allocateDirect(4); // TODO - reuse this
    boolean frameFailed = false;

    for (int i = 0; i < 8; i++) {
        // Note the coordinates are inverted on the Y-axis in GL.
        int x, y;
        if (i < 4) {
            x = i * (mWidth / 4) + (mWidth / 8);
            y = (mHeight * 3) / 4;
        } else {
            x = (7 - i) * (mWidth / 4) + (mWidth / 8);
            y = mHeight / 4;
        }

        GLES20.glReadPixels(x, y, 1, 1, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, pixelBuf);
        int r = pixelBuf.get(0) & 0xff;
        int g = pixelBuf.get(1) & 0xff;
        int b = pixelBuf.get(2) & 0xff;
        // Log.d(TAG, "GOT(" + frameIndex + "/" + i + "): r=" + r + " g=" + g + " b=" + b);

        int expR, expG, expB;
        if (i == frameIndex % 8) {
            // colored rect (green/blue swapped)
            expR = TEST_R1;
            expG = TEST_B1;
            expB = TEST_G1;
        } else {
            // zero background color (green/blue swapped)
            expR = TEST_R0;
            expG = TEST_B0;
            expB = TEST_G0;
        }
        if (!isColorClose(r, expR) || !isColorClose(g, expG) || !isColorClose(b, expB)) {
            Log.w(TAG, "Bad frame " + frameIndex + " (rect=" + i + ": rgb=" + r + "," + g
                    + "," + b + " vs. expected " + expR + "," + expG + "," + expB + ")");
            frameFailed = true;
        }
    }

    return !frameFailed;
}
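The check above relies on an isColorClose() helper that is not shown here. A minimal sketch, assuming a small fixed per-channel tolerance (the threshold value is an assumption, not taken from this code):

    // Hypothetical helper: returns true if the actual channel value is within a
    // small tolerance of the expected value. The tolerance of 8 is an assumed default.
    private static boolean isColorClose(int actual, int expected) {
        final int MAX_DELTA = 8;
        return Math.abs(actual - expected) <= MAX_DELTA;
    }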
public final void run() {
    // 'a' and 'b' are the surface width and height; 'c' is the destination pixel
    // array and 'd' appears to be a lock/semaphore released when the copy is done.
    IntBuffer intbuffer = IntBuffer.allocate(a * b);
    // The original call used the raw values 6408 (GL_RGBA) and 5121 (GL_UNSIGNED_BYTE).
    GLES20.glReadPixels(0, 0, a, b, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, intbuffer);
    int[] ai = intbuffer.array();

    // glReadPixels returns rows bottom-up; flip vertically while copying into 'c'.
    for (int i = 0; i < b; i++) {
        for (int j = 0; j < a; j++) {
            c[(b - i - 1) * a + j] = ai[a * i + j];
        }
    }
    d.release();
}
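The flipped int array is presumably consumed elsewhere, e.g. wrapped in a Bitmap. One caveat worth noting: glReadPixels returns RGBA byte order, which a little-endian IntBuffer exposes as ABGR-packed ints, so red and blue typically need to be swapped before calling Bitmap.createBitmap(). A hypothetical conversion helper (names and usage are assumptions, not from this snippet):

    // Hypothetical helper; assumes android.graphics.Bitmap is imported and that
    // width, height, and pixels correspond to the fields a, b, and c above.
    private static Bitmap toBitmap(int[] pixels, int width, int height) {
        int[] argb = new int[width * height];
        for (int i = 0; i < pixels.length; i++) {
            int p = pixels[i];
            // GL's RGBA bytes read back as ABGR ints on little-endian; swap R and B.
            argb[i] = (p & 0xff00ff00) | ((p & 0x00ff0000) >> 16) | ((p & 0x000000ff) << 16);
        }
        return Bitmap.createBitmap(argb, width, height, Bitmap.Config.ARGB_8888);
    }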
public synchronized void put() {
    while (full) {
        try {
            wait();
        } catch (InterruptedException e) {
            Log.e(TAG, "Interrupted when waiting for space in buffer");
        }
    }
    GLES20.glReadPixels(0, 0, mWidth, mHeight, GL10.GL_RGBA, GL10.GL_UNSIGNED_BYTE, buf);
    buf.get(bytes);
    buf.rewind();
    full = true;
    notifyAll();
}
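put() is the producer half of a guarded bounded buffer: it blocks while the buffer is full, reads the frame into bytes, then wakes any waiting consumer. The matching consumer is not shown; a minimal sketch, assuming the same monitor and fields (full, bytes), could look like:

    // Hypothetical consumer counterpart to put(), assuming the same fields and lock.
    public synchronized byte[] get() {
        while (!full) {
            try {
                wait();
            } catch (InterruptedException e) {
                Log.e(TAG, "Interrupted when waiting for data in buffer");
            }
        }
        full = false;
        notifyAll();
        return bytes;
    }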
private void selectionDraw(GL10 glUnused) {
    setFBO(glUnused, true);
    if (layers == null) {
        return;
    }
    synchronized (layers) {
        for (Layer layer : getLayers()) {
            if (layer.isEnabled() && (layer instanceof SelectableLayer)) {
                camera.pushM();
                if (layer instanceof TfLayer) {
                    GraphName layerFrame = ((TfLayer) layer).getFrame();
                    if (layerFrame != null) {
                        Transform t = Utility.newTransformIfPossible(
                                frameTransformTree, layerFrame, camera.getFixedFrame());
                        camera.applyTransform(t);
                    }
                }
                ((SelectableLayer) layer).selectionDraw(glUnused);
                camera.popM();
            }
        }
    }

    // is THIS your card?
    ByteBuffer colorBuf = ByteBuffer.allocateDirect(4).order(ByteOrder.nativeOrder());
    colorBuf.position(0);
    Point selected = camera.getSelectionManager().getSelectionCoordinates();
    // Flip Y: screen coordinates run top-down, GL coordinates run bottom-up.
    selected.set(selected.x, (camera.getViewport().getHeight() - selected.y));
    GLES20.glReadPixels(
            selected.x, selected.y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, colorBuf);
    colorBuf.position(0);
    Color selectedColor = new Color(
            (colorBuf.get() & 0xff) / 255f,
            (colorBuf.get() & 0xff) / 255f,
            (colorBuf.get() & 0xff) / 255f,
            1f);
    camera.getSelectionManager().selectItemWithColor(selectedColor);
    setFBO(glUnused, false);
}
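This is standard GPU color picking: during the selection pass each selectable item draws itself in a unique solid color, and the color read back under the cursor identifies the item. How the IDs are mapped to colors is not shown in this snippet; a hypothetical mapping (assuming the same Color class used above) might be:

    // Hypothetical mapping from a 24-bit item ID to a unique picking color.
    private static Color colorForId(int id) {
        float r = ((id >> 16) & 0xff) / 255f;
        float g = ((id >> 8) & 0xff) / 255f;
        float b = (id & 0xff) / 255f;
        return new Color(r, g, b, 1f);
    }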
/**
 * Saves the EGL surface to a file.
 *
 * <p>Expects that this object's EGL surface is current.
 */
public void saveFrame(File file) throws IOException {
    if (!mEglCore.isCurrent(mEGLSurface)) {
        throw new RuntimeException("Expected EGL context/surface is not current");
    }

    // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
    // data (i.e. a byte of red, followed by a byte of green...). While the Bitmap
    // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
    // Bitmap "copy pixels" method wants the same format GL provides.
    //
    // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
    // here often.
    //
    // Making this even more interesting is the upside-down nature of GL, which means
    // our output will look upside down relative to what appears on screen if the
    // typical GL conventions are used.

    String filename = file.toString();

    int width = getWidth();
    int height = getHeight();
    ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
    buf.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    GlUtil.checkGlError("glReadPixels");
    buf.rewind();

    BufferedOutputStream bos = null;
    try {
        bos = new BufferedOutputStream(new FileOutputStream(filename));
        Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
        bmp.copyPixelsFromBuffer(buf);
        bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
        bmp.recycle();
    } finally {
        if (bos != null) bos.close();
    }
    Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
}
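As the comment notes, the saved PNG will be upside down relative to the screen because GL rows run bottom-up. If an upright image is needed, one option (a sketch, not part of the original saveFrame()) is to flip the Bitmap before compressing it:

    // Hypothetical vertical flip; assumes android.graphics.Matrix is imported.
    Matrix flip = new Matrix();
    flip.preScale(1f, -1f);
    Bitmap flipped = Bitmap.createBitmap(bmp, 0, 0, width, height, flip, false);
    flipped.compress(Bitmap.CompressFormat.PNG, 90, bos);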
@Override
public void glReadPixels(
        int x, int y, int width, int height, int format, int type, Buffer pixels) {
    GLES20.glReadPixels(x, y, width, height, format, type, pixels);
}
/**
 * Test disconnecting the SurfaceTextureHelper while holding a pending texture frame. The pending
 * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
 * buffer and reading it back with glReadPixels().
 */
@MediumTest
public static void testLateReturnFrame() throws InterruptedException {
    final int width = 16;
    final int height = 16;

    // Create EGL base with a pixel buffer as display output.
    final EglBase eglBase = EglBase.create(null, EglBase.ConfigType.PIXEL_BUFFER);
    eglBase.createPbufferSurface(width, height);

    // Create SurfaceTextureHelper and listener.
    final SurfaceTextureHelper surfaceTextureHelper =
            SurfaceTextureHelper.create(eglBase.getEglBaseContext());
    final MockTextureListener listener = new MockTextureListener();
    surfaceTextureHelper.setListener(listener);
    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);

    // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture
    // in |surfaceTextureHelper| as the target EGLSurface.
    final EglBase eglOesBase =
            EglBase.create(eglBase.getEglBaseContext(), EglBase.ConfigType.PLAIN);
    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
    assertEquals(eglOesBase.surfaceWidth(), width);
    assertEquals(eglOesBase.surfaceHeight(), height);

    final int red = 79;
    final int green = 66;
    final int blue = 161;
    // Draw a constant color frame onto the SurfaceTexture.
    eglOesBase.makeCurrent();
    GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
    eglOesBase.swapBuffers();
    eglOesBase.release();

    // Wait for OES texture frame.
    listener.waitForNewFrame();
    // Disconnect while holding the frame.
    surfaceTextureHelper.disconnect();

    // Draw the pending texture frame onto the pixel buffer.
    eglBase.makeCurrent();
    final GlRectDrawer drawer = new GlRectDrawer();
    drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
    drawer.release();

    // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g.
    // Nexus 9.
    final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
    GlUtil.checkNoGLES2Error("glReadPixels");
    eglBase.release();

    // Assert rendered image is expected constant color.
    while (rgbaData.hasRemaining()) {
        assertEquals(rgbaData.get() & 0xFF, red);
        assertEquals(rgbaData.get() & 0xFF, green);
        assertEquals(rgbaData.get() & 0xFF, blue);
        assertEquals(rgbaData.get() & 0xFF, 255);
    }

    // Late frame return after everything has been disconnected and released.
    surfaceTextureHelper.returnTextureFrame();
}