/**
 * Test disconnecting the SurfaceTextureHelper, but keep trying to produce more texture frames. No
 * frames should be delivered to the listener.
 */
@MediumTest
public static void testDisconnect() throws InterruptedException {
  // Create SurfaceTextureHelper and listener.
  final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(null);
  final MockTextureListener listener = new MockTextureListener();
  surfaceTextureHelper.setListener(listener);
  // Create EglBase with the SurfaceTexture as target EGLSurface.
  final EglBase eglBase = EglBase.create(null, EglBase.ConfigType.PLAIN);
  eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
  eglBase.makeCurrent();
  // Assert no frame has been received yet.
  assertFalse(listener.waitForNewFrame(1));
  // Draw and wait for one frame.
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  // swapBuffers() will ultimately trigger onTextureFrameAvailable().
  eglBase.swapBuffers();
  listener.waitForNewFrame();
  surfaceTextureHelper.returnTextureFrame();

  // Disconnect - we should not receive any textures after this.
  surfaceTextureHelper.disconnect();

  // Draw one frame.
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  eglBase.swapBuffers();
  // swapBuffers() should not trigger onTextureFrameAvailable() because we are disconnected.
  // Assert that no OES texture was delivered.
  assertFalse(listener.waitForNewFrame(500));

  eglBase.release();
}
/**
 * Test using SurfaceTextureHelper on a separate thread. A uniform texture frame is created and
 * received on a thread separate from the test thread and returned after disconnect.
 */
@MediumTest
public static void testLateReturnFrameOnSeparateThread() throws InterruptedException {
  final HandlerThread thread = new HandlerThread("SurfaceTextureHelperTestThread");
  thread.start();
  final Handler handler = new Handler(thread.getLooper());

  // Create SurfaceTextureHelper and listener.
  final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(null, handler);
  // Create a mock listener and expect frames to be delivered on |thread|.
  final MockTextureListener listener = new MockTextureListener(thread);
  surfaceTextureHelper.setListener(listener);

  // Create resources for stubbing an OES texture producer. |eglOesBase| has the
  // SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
  final EglBase eglOesBase = EglBase.create(null, EglBase.ConfigType.PLAIN);
  eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
  eglOesBase.makeCurrent();

  // Draw a frame onto the SurfaceTexture.
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  // swapBuffers() will ultimately trigger onTextureFrameAvailable().
  eglOesBase.swapBuffers();
  eglOesBase.release();

  // Wait for an OES texture to arrive.
  listener.waitForNewFrame();

  surfaceTextureHelper.disconnect(handler);

  surfaceTextureHelper.returnTextureFrame();
}
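// The tests above rely on a MockTextureListener whose implementation is not shown in this excerpt.
// Below is a minimal sketch of what such a listener could look like, based only on how the tests
// use it: the OnTextureFrameAvailableListener callback signature, the field names, the optional
// expected-thread check and the waitForNewFrame() helpers are assumptions, not the actual helper
// from the test file.
private static class MockTextureListener
    implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
  public int oesTextureId;
  public float[] transformMatrix;
  private boolean hasNewFrame = false;
  // Thread on which frames are expected to arrive, or null if any thread is acceptable.
  private final Thread expectedThread;

  MockTextureListener() {
    this.expectedThread = null;
  }

  MockTextureListener(Thread expectedThread) {
    this.expectedThread = expectedThread;
  }

  @Override
  public synchronized void onTextureFrameAvailable(
      int oesTextureId, float[] transformMatrix, long timestampNs) {
    if (expectedThread != null && Thread.currentThread() != expectedThread) {
      throw new IllegalStateException("onTextureFrameAvailable called on wrong thread.");
    }
    this.oesTextureId = oesTextureId;
    this.transformMatrix = transformMatrix;
    hasNewFrame = true;
    notifyAll();
  }

  // Wait indefinitely for a new frame.
  public synchronized void waitForNewFrame() throws InterruptedException {
    while (!hasNewFrame) {
      wait();
    }
    hasNewFrame = false;
  }

  // Wait at most |timeoutMs| for a new frame. Returns true if a frame arrived in time.
  public synchronized boolean waitForNewFrame(long timeoutMs) throws InterruptedException {
    final long deadlineMs = System.currentTimeMillis() + timeoutMs;
    while (!hasNewFrame) {
      final long waitMs = deadlineMs - System.currentTimeMillis();
      if (waitMs <= 0) {
        break;
      }
      wait(waitMs);
    }
    final boolean didReceiveFrame = hasNewFrame;
    hasNewFrame = false;
    return didReceiveFrame;
  }
}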
// Pass null in |sharedContext| to configure the codec for ByteBuffer output.
private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
  if (mediaCodecThread != null) {
    throw new RuntimeException("Forgot to release()?");
  }
  useSurface = (sharedContext != null);
  String mime = null;
  String[] supportedCodecPrefixes = null;
  if (type == VideoCodecType.VIDEO_CODEC_VP8) {
    mime = VP8_MIME_TYPE;
    supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
  } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
    mime = H264_MIME_TYPE;
    supportedCodecPrefixes = supportedH264HwCodecPrefixes;
  } else {
    throw new RuntimeException("Non supported codec " + type);
  }
  DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
  if (properties == null) {
    throw new RuntimeException("Cannot find HW decoder for " + type);
  }
  Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height
      + ". Color: 0x" + Integer.toHexString(properties.colorFormat)
      + ". Use Surface: " + useSurface);
  if (sharedContext != null) {
    Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
  }
  mediaCodecThread = Thread.currentThread();
  try {
    Surface decodeSurface = null;
    this.width = width;
    this.height = height;
    stride = width;
    sliceHeight = height;

    if (useSurface) {
      // Create shared EGL context.
      eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
      eglBase.createDummyPbufferSurface();
      eglBase.makeCurrent();

      // Create output surface.
      textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
      Logging.d(TAG, "Video decoder TextureID = " + textureID);
      surfaceTexture = new SurfaceTexture(textureID);
      surface = new Surface(surfaceTexture);
      decodeSurface = surface;
    }

    MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
    if (!useSurface) {
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
    }
    Logging.d(TAG, " Format: " + format);
    // The static helper in MediaCodecVideoEncoder is reused here; it returns null if the codec
    // cannot be created.
    mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
    if (mediaCodec == null) {
      return false;
    }
    mediaCodec.configure(format, decodeSurface, null, 0);
    mediaCodec.start();
    colorFormat = properties.colorFormat;
    outputBuffers = mediaCodec.getOutputBuffers();
    inputBuffers = mediaCodec.getInputBuffers();
    Logging.d(TAG, "Input buffers: " + inputBuffers.length
        + ". Output buffers: " + outputBuffers.length);
    return true;
  } catch (IllegalStateException e) {
    Logging.e(TAG, "initDecode failed", e);
    return false;
  }
}
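// initDecode() above allocates a MediaCodec and, for surface output, an EGL context, a
// SurfaceTexture and a Surface, all of which have to be torn down again on the same codec thread.
// The following is a minimal sketch of such a release() counterpart, assuming only the fields that
// appear in initDecode(); the actual release logic in MediaCodecVideoDecoder is not shown in this
// excerpt and may handle errors and threading differently.
private void release() {
  Logging.d(TAG, "Java releaseDecoder");
  if (mediaCodecThread != Thread.currentThread()) {
    throw new RuntimeException("release() must be called on the codec thread.");
  }
  try {
    mediaCodec.stop();
    mediaCodec.release();
  } catch (IllegalStateException e) {
    Logging.e(TAG, "release failed", e);
  }
  mediaCodec = null;
  mediaCodecThread = null;
  if (useSurface) {
    // Release the output surface and the OES texture, then tear down the EGL context.
    surface.release();
    surface = null;
    surfaceTexture.release();
    surfaceTexture = null;
    GLES20.glDeleteTextures(1, new int[] {textureID}, 0);
    eglBase.release();
    eglBase = null;
  }
}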
/**
 * Test disconnecting the SurfaceTextureHelper while holding a pending texture frame. The pending
 * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
 * buffer and reading it back with glReadPixels().
 */
@MediumTest
public static void testLateReturnFrame() throws InterruptedException {
  final int width = 16;
  final int height = 16;

  // Create EGL base with a pixel buffer as display output.
  final EglBase eglBase = EglBase.create(null, EglBase.ConfigType.PIXEL_BUFFER);
  eglBase.createPbufferSurface(width, height);

  // Create SurfaceTextureHelper and listener.
  final SurfaceTextureHelper surfaceTextureHelper =
      SurfaceTextureHelper.create(eglBase.getEglBaseContext());
  final MockTextureListener listener = new MockTextureListener();
  surfaceTextureHelper.setListener(listener);
  surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);

  // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
  // |surfaceTextureHelper| as the target EGLSurface.
  final EglBase eglOesBase =
      EglBase.create(eglBase.getEglBaseContext(), EglBase.ConfigType.PLAIN);
  eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
  assertEquals(eglOesBase.surfaceWidth(), width);
  assertEquals(eglOesBase.surfaceHeight(), height);

  final int red = 79;
  final int green = 66;
  final int blue = 161;
  // Draw a constant color frame onto the SurfaceTexture.
  eglOesBase.makeCurrent();
  GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
  GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
  // swapBuffers() will ultimately trigger onTextureFrameAvailable().
  eglOesBase.swapBuffers();
  eglOesBase.release();

  // Wait for OES texture frame.
  listener.waitForNewFrame();
  // Disconnect while holding the frame.
  surfaceTextureHelper.disconnect();

  // Draw the pending texture frame onto the pixel buffer.
  eglBase.makeCurrent();
  final GlRectDrawer drawer = new GlRectDrawer();
  drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
  drawer.release();

  // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
  final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
  GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
  GlUtil.checkNoGLES2Error("glReadPixels");
  eglBase.release();

  // Assert rendered image is expected constant color.
  while (rgbaData.hasRemaining()) {
    assertEquals(rgbaData.get() & 0xFF, red);
    assertEquals(rgbaData.get() & 0xFF, green);
    assertEquals(rgbaData.get() & 0xFF, blue);
    assertEquals(rgbaData.get() & 0xFF, 255);
  }

  // Late frame return after everything has been disconnected and released.
  surfaceTextureHelper.returnTextureFrame();
}