/** * Prepares OpenGL ES before we draw a frame. * * @param headTransform The head transformation in the new frame. */ @Override public void onNewFrame(HeadTransform headTransform) { // GLES20.glUseProgram(mGlProgram); // // mModelViewProjectionParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVP"); // mLightPosParam = GLES20.glGetUniformLocation(mGlProgram, "u_LightPos"); // mModelViewParam = GLES20.glGetUniformLocation(mGlProgram, "u_MVMatrix"); // mModelParam = GLES20.glGetUniformLocation(mGlProgram, "u_Model"); // mIsFloorParam = GLES20.glGetUniformLocation(mGlProgram, "u_IsFloor"); // // // Build the Model part of the ModelView matrix. // Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f); // // // Build the camera matrix and apply it to the ModelView. // Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, // 0.0f); // // headTransform.getHeadView(mHeadView, 0); // // checkGLError("onReadyToDraw"); float[] mtx = new float[16]; GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); surface.updateTexImage(); surface.getTransformMatrix(mtx); }
/**
 * Prepares OpenGL ES before we draw a frame.
 *
 * @param headTransform The head transformation in the new frame.
 */
@Override
public void onNewFrame(HeadTransform headTransform) {
    // Scratch storage for the SurfaceTexture's 4x4 texture transform;
    // allocated fresh each frame and discarded at the end of this method.
    float[] mtx = new float[16];
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    // Latch the newest camera frame into the external texture and fetch the
    // matrix that maps texture coordinates onto that frame.
    surface.updateTexImage();
    surface.getTransformMatrix(mtx);
}
/**
 * Retrieve the 4x4 texture coordinate transform matrix associated with the
 * texture image set by the most recent call to updateTexImage.
 *
 * @param matrix the array into which the 4x4 matrix will be stored. The array
 *     must have exactly 16 elements.
 * @return true for success to get the matrix and false for fail to get it.
 */
public boolean getTransformMatrix(float[] matrix) {
    // Guard clause: without a backing SurfaceTexture there is no matrix.
    if (mSurfaceTexture == null) {
        return false;
    }
    mSurfaceTexture.getTransformMatrix(matrix);
    return true;
}
/**
 * Renders the latest video frame from the SurfaceTexture onto a full-screen
 * quad. Returns immediately (keeping the previously rendered frame) when no
 * new frame has been flagged via {@code updateSurface}.
 *
 * @param glUnused unused GL10 handle; rendering uses the static GLES20 API.
 */
public void onDrawFrame(GL10 glUnused) {
    synchronized (this) {
        if (updateSurface) {
            // Latch the newest frame and its texture-coordinate transform.
            mSurface.updateTexImage();
            mSurface.getTransformMatrix(mSTMatrix);
            updateSurface = false;
        } else {
            // Nothing new to show.
            return;
        }
    }
    // BUG FIX: glClearColor takes components in [0, 1]; the original passed
    // 255.0f, which the GL spec silently clamps to 1.0f. Use 1.0f explicitly
    // so the intent (opaque white) is stated, not accidental.
    GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);

    // Position attribute from the interleaved vertex buffer.
    mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    // Texture (UV) attribute from its own buffer.
    mTextureVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    GLES20.glVertexAttribPointer(maTextureHandle, 2, GLES20.GL_FLOAT, false,
            TEXTURE_VERTICES_DATA_STRIDE_BYTES, mTextureVertices);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    // Identity MVP: the quad is already in clip space; ST matrix comes from
    // the SurfaceTexture latched above.
    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");
    GLES20.glFinish();
}
/**
 * Captures a screenshot of the main built-in display into a GL texture and
 * configures the GLES10 viewport/matrices for drawing it.
 *
 * @return true if the screenshot was captured and the viewport configured;
 *     false if the EGL context could not be attached or texture creation
 *     failed. Always detaches the EGL context before returning.
 */
private boolean captureScreenshotTextureAndSetViewport() {
    // Must hold the EGL context for all GL calls below.
    if (!attachEglContext()) {
        return false;
    }
    try {
        // Lazily create the texture name that will hold the screenshot.
        if (!mTexNamesGenerated) {
            GLES10.glGenTextures(1, mTexNames, 0);
            if (checkGlErrors("glGenTextures")) {
                return false;
            }
            mTexNamesGenerated = true;
        }
        // Route a screenshot of the main display into our texture via a
        // temporary SurfaceTexture/Surface pair.
        // NOTE(review): `st` is never released here — presumably intentional
        // because the texture image must stay latched; confirm no leak.
        final SurfaceTexture st = new SurfaceTexture(mTexNames[0]);
        final Surface s = new Surface(st);
        try {
            SurfaceControl.screenshot(
                    SurfaceControl.getBuiltInDisplay(SurfaceControl.BUILT_IN_DISPLAY_ID_MAIN), s);
        } finally {
            // The Surface is only needed for the duration of the screenshot.
            s.release();
        }
        // Latch the screenshot into the texture and grab its transform.
        st.updateTexImage();
        st.getTransformMatrix(mTexMatrix);

        // Set up texture coordinates for a quad.
        // We might need to change this if the texture ends up being
        // a different size from the display for some reason.
        mTexCoordBuffer.put(0, 0f);
        mTexCoordBuffer.put(1, 0f);
        mTexCoordBuffer.put(2, 0f);
        mTexCoordBuffer.put(3, 1f);
        mTexCoordBuffer.put(4, 1f);
        mTexCoordBuffer.put(5, 1f);
        mTexCoordBuffer.put(6, 1f);
        mTexCoordBuffer.put(7, 0f);

        // Set up our viewport: orthographic projection in display pixels,
        // identity modelview, and the screenshot transform on the texture
        // matrix stack so sampling maps correctly onto the quad.
        GLES10.glViewport(0, 0, mDisplayWidth, mDisplayHeight);
        GLES10.glMatrixMode(GLES10.GL_PROJECTION);
        GLES10.glLoadIdentity();
        GLES10.glOrthof(0, mDisplayWidth, 0, mDisplayHeight, 0, 1);
        GLES10.glMatrixMode(GLES10.GL_MODELVIEW);
        GLES10.glLoadIdentity();
        GLES10.glMatrixMode(GLES10.GL_TEXTURE);
        GLES10.glLoadIdentity();
        GLES10.glLoadMatrixf(mTexMatrix, 0);
    } finally {
        detachEglContext();
    }
    return true;
}
/**
 * Filter processing entry point: optionally blocks until a new camera frame
 * arrives, latches it from the SurfaceTexture, maps the source region through
 * the frame's texture transform, and pushes the extracted frame on the
 * "video" output port.
 *
 * @param context the filter context used to allocate the output frame.
 * @throws RuntimeException if a frame wait times out and mCloseOnTimeout is
 *     false.
 */
@Override
public void process(FilterContext context) {
    if (mLogVerbose) Log.v(TAG, "Processing new frame");

    // First, get new frame if available. On the very first frame we always
    // wait, since there is nothing latched yet to re-use.
    if (mWaitForNewFrame || mFirstFrame) {
        boolean gotNewFrame;
        if (mWaitTimeout != 0) {
            gotNewFrame = mNewFrameAvailable.block(mWaitTimeout);
            if (!gotNewFrame) {
                if (!mCloseOnTimeout) {
                    throw new RuntimeException("Timeout waiting for new frame");
                } else {
                    // Best-effort mode: shut the port down instead of failing.
                    if (mLogVerbose) Log.v(TAG, "Timeout waiting for a new frame. Closing.");
                    closeOutputPort("video");
                    return;
                }
            }
        } else {
            // No timeout configured: block indefinitely.
            mNewFrameAvailable.block();
        }
        // Re-arm the condition for the next frame.
        mNewFrameAvailable.close();
        mFirstFrame = false;
    }

    // Latch the newest frame and its texture-coordinate transform.
    mSurfaceTexture.updateTexImage();
    mSurfaceTexture.getTransformMatrix(mFrameTransform);
    // Map the static source quad through the frame transform; columns of the
    // result (indices 0/1, 4/5, 8/9, 12/13) are the transformed corner (x,y).
    Matrix.multiplyMM(mMappedCoords, 0, mFrameTransform, 0, mSourceCoords, 0);
    mFrameExtractor.setSourceRegion(
            mMappedCoords[0], mMappedCoords[1], mMappedCoords[4], mMappedCoords[5],
            mMappedCoords[8], mMappedCoords[9], mMappedCoords[12], mMappedCoords[13]);

    // Next, render to output.
    Frame output = context.getFrameManager().newFrame(mOutputFormat);
    mFrameExtractor.process(mMediaFrame, output);
    output.setTimestamp(mSurfaceTexture.getTimestamp());
    pushOutput("video", output);
    output.release();
}
@Override public void draw(GLCanvas canvas, int x, int y, int width, int height) { synchronized (this) { if (!mHasTexture) return; mSurfaceTexture.updateTexImage(); mSurfaceTexture.getTransformMatrix(mTransform); // Flip vertically. canvas.save(GLCanvas.SAVE_FLAG_MATRIX); int cx = x + width / 2; int cy = y + height / 2; canvas.translate(cx, cy); canvas.scale(1, -1, 1); canvas.translate(-cx, -cy); canvas.drawTexture(mExtTexture, mTransform, x, y, width, height); canvas.restore(); } }
@Override public void onNewFrame(HeadTransform headTransform) { Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f); // Build the camera matrix and apply it to the ModelView. Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f); headTransform.getHeadView(headView, 0); checkGLError("onReadyToDraw"); float[] mtx = new float[16]; GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT); surface.updateTexImage(); surface.getTransformMatrix(mtx); mHeadTransform = headTransform; headTransform.getHeadView(headView, 0); }
/**
 * Per-frame render callback: latches the newest camera frame, drives the
 * recording start/stop/resume state machine, forwards the frame to the video
 * encoder, and draws it to screen (with a flashing box while recording).
 */
@Override
public void onDrawFrame(GL10 unused) {
    if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureId);
    boolean showBox = false;

    // Latch the latest frame. If there isn't anything new, we'll just re-use whatever
    // was there before.
    mSurfaceTexture.updateTexImage();

    // If the recording state is changing, take care of it here. Ideally we wouldn't
    // be doing all this in onDrawFrame(), but the EGLContext sharing with GLSurfaceView
    // makes it hard to do elsewhere.
    if (mRecordingEnabled) {
        switch (mRecordingStatus) {
            case RECORDING_OFF:
                Log.d(TAG, "START recording");
                // start recording
                mVideoEncoder.startRecording(
                        new TextureMovieEncoder.EncoderConfig(
                                mOutputFile, 480, 480, 1000000, EGL14.eglGetCurrentContext()));
                // TODO: get optimal width and height according to specified devices
                mRecordingStatus = RECORDING_ON;
                break;
            case RECORDING_RESUMED:
                Log.d(TAG, "RESUME recording");
                // Rebind the encoder to the (possibly recreated) EGL context.
                mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
                mRecordingStatus = RECORDING_ON;
                break;
            case RECORDING_ON:
                // yay — already recording, nothing to do.
                break;
            default:
                throw new RuntimeException("unknown status " + mRecordingStatus);
        }
    } else {
        switch (mRecordingStatus) {
            case RECORDING_ON:
            case RECORDING_RESUMED:
                // stop recording
                Log.d(TAG, "STOP recording");
                mVideoEncoder.stopRecording();
                mRecordingStatus = RECORDING_OFF;
                break;
            case RECORDING_OFF:
                // yay — already stopped, nothing to do.
                break;
            default:
                throw new RuntimeException("unknown status " + mRecordingStatus);
        }
    }

    // Set the video encoder's texture name. We only need to do this once, but in the
    // current implementation it has to happen after the video encoder is started, so
    // we just do it here.
    //
    // TODO: be less lame.
    mVideoEncoder.setTextureId(mTextureId);

    // Tell the video encoder thread that a new frame is available.
    // This will be ignored if we're not actually recording.
    mVideoEncoder.frameAvailable(mSurfaceTexture);

    if (mIncomingWidth <= 0 || mIncomingHeight <= 0) {
        // Texture size isn't set yet. This is only used for the filters, but to be
        // safe we can just skip drawing while we wait for the various races to resolve.
        // (This seems to happen if you toggle the screen off/on with power button.)
        Log.i(TAG, "Drawing before incoming texture size set; skipping");
        return;
    }

    // Update the filter, if necessary.
    if (mCurrentFilter != mNewFilter) {
        updateFilter();
    }
    if (mIncomingSizeUpdated) {
        mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
        mIncomingSizeUpdated = false;
    }

    // Draw the video frame.
    mSurfaceTexture.getTransformMatrix(mSTMatrix);
    mFullScreen.drawFrame(mTextureId, mSTMatrix);

    // Draw a flashing box if we're recording. This only appears on screen.
    showBox = (mRecordingStatus == RECORDING_ON);
    if (showBox && (++mFrameCount & 0x04) == 0) {
        drawBox();
    }
}
/**
 * Renders the pending frame (if any) and then draws the current content into
 * the display layout rectangle. YUV frames are uploaded to the YUV textures;
 * external-texture frames are latched and deep-copied into an offscreen
 * framebuffer so the original OES texture can be returned promptly.
 */
private void draw(GlRectDrawer drawer) {
    if (!seenFrame) {
        // No frame received yet - nothing to render.
        return;
    }
    long now = System.nanoTime();
    final boolean isNewFrame;
    synchronized (pendingFrameLock) {
        isNewFrame = (pendingFrame != null);
        // Record the arrival time of the very first frame for statistics.
        if (isNewFrame && startTimeNs == -1) {
            startTimeNs = now;
        }
        if (isNewFrame) {
            if (pendingFrame.yuvFrame) {
                rendererType = RendererType.RENDERER_YUV;
                drawer.uploadYuvData(
                        yuvTextures, pendingFrame.width, pendingFrame.height,
                        pendingFrame.yuvStrides, pendingFrame.yuvPlanes);
                // The convention in WebRTC is that the first element in a ByteBuffer corresponds
                // to the top-left corner of the image, but in glTexImage2D() the first element
                // corresponds to the bottom-left corner. We correct this discrepancy by setting
                // a vertical flip as sampling matrix.
                final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
                rotatedSamplingMatrix =
                        RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
            } else {
                rendererType = RendererType.RENDERER_TEXTURE;
                // External texture rendering. Update texture image to latest and make a deep
                // copy of the external texture.
                // TODO(magjed): Move updateTexImage() to the video source instead.
                final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
                surfaceTexture.updateTexImage();
                final float[] samplingMatrix = new float[16];
                surfaceTexture.getTransformMatrix(samplingMatrix);
                rotatedSamplingMatrix =
                        RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);

                // Reallocate offscreen texture if necessary.
                textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());

                // Bind our offscreen framebuffer.
                GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
                GlUtil.checkNoGLES2Error("glBindFramebuffer");

                // Copy the OES texture content. This will also normalize the sampling matrix.
                GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
                drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
                // The copy already applied the sampling matrix, so further
                // draws of the copy start from identity.
                rotatedSamplingMatrix = RendererCommon.identityMatrix();

                // Restore normal framebuffer.
                GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
            }
            copyTimeNs += (System.nanoTime() - now);
            // Hand the frame back to the source and clear the pending slot.
            VideoRenderer.renderFrameDone(pendingFrame);
            pendingFrame = null;
        }
    }

    // OpenGL defaults to lower left origin - flip vertically.
    GLES20.glViewport(
            displayLayout.left, screenHeight - displayLayout.bottom,
            displayLayout.width(), displayLayout.height());

    updateLayoutMatrix();
    final float[] texMatrix = RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
    if (rendererType == RendererType.RENDERER_YUV) {
        drawer.drawYuv(yuvTextures, texMatrix);
    } else {
        drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
    }

    if (isNewFrame) {
        framesRendered++;
        drawTimeNs += (System.nanoTime() - now);
        // Log rendering statistics every 300 frames.
        if ((framesRendered % 300) == 0) {
            logStatistics();
        }
    }
}
/**
 * Copies the SurfaceTexture's 4x4 texture-coordinate transform matrix into
 * {@code mtx}. Leaves {@code mtx} untouched when no SurfaceTexture is
 * currently attached.
 *
 * @param mtx destination array; must have at least 16 elements.
 */
@SuppressLint("NewApi")
public void getSurfaceTextureTransformMatrix(float[] mtx) {
    mSurfaceTextureLock.lock();
    try {
        if (mSurfaceTexture != null) {
            mSurfaceTexture.getTransformMatrix(mtx);
        }
    } finally {
        // BUG FIX: unlock in a finally block so the lock is released even if
        // getTransformMatrix throws (e.g. on a bad-length array); the
        // original could leave the lock held forever, deadlocking callers.
        mSurfaceTextureLock.unlock();
    }
}
/**
 * Draws two textured quads using the external-OES texture most recently
 * latched on {@code st}. The first quad uses the interleaved buffer
 * mTriangleVertices1, the second uses mTriangleVertices2; both share the same
 * shader program, identity MVP matrix, and the SurfaceTexture's ST matrix.
 *
 * @param st the SurfaceTexture whose transform matrix maps texture
 *     coordinates onto the latest frame (updateTexImage must already have
 *     been called by the caller).
 */
public void drawFrame(SurfaceTexture st) {
    checkGlError("onDrawFrame start");
    st.getTransformMatrix(mSTMatrix);

    GLES20.glClearColor(0.0F, 1.0F, 0.0F, 1.0F);
    GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

    GLES20.glUseProgram(mProgram);
    checkGlError("glUseProgram");

    GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
    GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

    // --- First quad: interleaved buffer mTriangleVertices1 ---
    mTriangleVertices1.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices1);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    mTriangleVertices1.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
    // NOTE(review): size 3 for a UV attribute is unusual (reference code for
    // this pattern uses 2); presumably the shader only consumes .st — confirm.
    GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
            TRIANGLE_VERTICES_DATA_STRIDE_BYTES, mTriangleVertices1);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");

    // --- Second quad: interleaved buffer mTriangleVertices2 ---
    // Replaced magic numbers with named equivalents (identical values):
    // 5126 == GLES20.GL_FLOAT, 5 == GLES20.GL_TRIANGLE_STRIP,
    // 20 == 5 floats per vertex (3 position + 2 UV) * 4 bytes.
    final int strideBytes = 5 * 4;
    mTriangleVertices2.position(0);
    GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false,
            strideBytes, mTriangleVertices2);
    checkGlError("glVertexAttribPointer maPosition");
    GLES20.glEnableVertexAttribArray(maPositionHandle);
    checkGlError("glEnableVertexAttribArray maPositionHandle");

    mTriangleVertices2.position(3);
    GLES20.glVertexAttribPointer(maTextureHandle, 3, GLES20.GL_FLOAT, false,
            strideBytes, mTriangleVertices2);
    checkGlError("glVertexAttribPointer maTextureHandle");
    GLES20.glEnableVertexAttribArray(maTextureHandle);
    checkGlError("glEnableVertexAttribArray maTextureHandle");

    Matrix.setIdentityM(mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
    GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);
    GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
    checkGlError("glDrawArrays");

    GLES20.glFinish();
    // Removed a large commented-out duplicate of this rendering sequence.
}