Code Example #1
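renderFrame() is the frame-delivery callback. It bails out if the renderer has already been released, notifies rendererEvents when the first frame arrives, validates the YUV strides, and hands the frame to the GL thread through pendingFrame, counting it as dropped if the previous frame has not been drawn yet. Finally it requests a render pass on the surface.
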
    @Override
    public synchronized void renderFrame(I420Frame frame) {
      if (surface == null) {
        // This object has been released.
        VideoRenderer.renderFrameDone(frame);
        return;
      }
      if (!seenFrame && rendererEvents != null) {
        Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
        rendererEvents.onFirstFrameRendered();
      }
      framesReceived++;
      synchronized (pendingFrameLock) {
        // Check input frame parameters.
        if (frame.yuvFrame) {
          if (frame.yuvStrides[0] < frame.width
              || frame.yuvStrides[1] < frame.width / 2
              || frame.yuvStrides[2] < frame.width / 2) {
            Logging.e(
                TAG,
                "Incorrect strides "
                    + frame.yuvStrides[0]
                    + ", "
                    + frame.yuvStrides[1]
                    + ", "
                    + frame.yuvStrides[2]);
            VideoRenderer.renderFrameDone(frame);
            return;
          }
        }

        if (pendingFrame != null) {
          // Skip rendering of this frame if previous frame was not rendered yet.
          framesDropped++;
          VideoRenderer.renderFrameDone(frame);
          return;
        }
        pendingFrame = frame;
      }
      setSize(frame.width, frame.height, frame.rotationDegree);
      seenFrame = true;

      // Request rendering.
      surface.requestRender();
    }
Code Example #2
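release() marks the renderer as released by clearing the surface reference, and returns any frame still parked in pendingFrame to VideoRenderer so its buffer is not leaked.
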
    private synchronized void release() {
      surface = null;
      synchronized (pendingFrameLock) {
        if (pendingFrame != null) {
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }
    }
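
Taken together, Examples #1 and #2 implement the producer and shutdown sides of a single-slot, drop-if-busy handoff: the decoder thread offers at most one frame, and the GL thread (Example #3 below) takes it. The following is a minimal, self-contained sketch of that pattern only; the class and method names are illustrative and not part of the WebRTC API.

// Illustrative sketch only: SingleSlotHandoff is a hypothetical name, not a WebRTC class.
final class SingleSlotHandoff<T> {
  private final Object lock = new Object();
  private T pending;    // at most one item waiting for the consumer thread
  private int dropped;  // items discarded because the slot was still occupied

  // Producer side (cf. renderFrame): returns false if an item is already pending,
  // in which case the caller must release/return the rejected item itself.
  boolean offer(T item) {
    synchronized (lock) {
      if (pending != null) {
        dropped++;
        return false;
      }
      pending = item;
      return true;
    }
  }

  // Consumer side (cf. draw): takes the pending item, or null if nothing new arrived.
  T take() {
    synchronized (lock) {
      final T item = pending;
      pending = null;
      return item;
    }
  }

  // Shutdown (cf. release): drains and returns any item still parked in the slot.
  T drain() {
    return take();
  }
}
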
Code Example #3
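draw() runs on the GL thread. If a new frame is pending, it either uploads the I420 planes as YUV textures or copies the external OES texture into an offscreen framebuffer, computes the flip/rotation sampling matrix, and returns the frame via renderFrameDone(). It then draws the latest texture into the display viewport and logs statistics every 300 rendered frames.
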
    private void draw(GlRectDrawer drawer) {
      if (!seenFrame) {
        // No frame received yet - nothing to render.
        return;
      }
      long now = System.nanoTime();

      final boolean isNewFrame;
      synchronized (pendingFrameLock) {
        isNewFrame = (pendingFrame != null);
        if (isNewFrame && startTimeNs == -1) {
          startTimeNs = now;
        }

        if (isNewFrame) {
          if (pendingFrame.yuvFrame) {
            rendererType = RendererType.RENDERER_YUV;
            drawer.uploadYuvData(
                yuvTextures,
                pendingFrame.width,
                pendingFrame.height,
                pendingFrame.yuvStrides,
                pendingFrame.yuvPlanes);
            // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
            // top-left corner of the image, but in glTexImage2D() the first element corresponds to
            // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
            // sampling matrix.
            final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
          } else {
            rendererType = RendererType.RENDERER_TEXTURE;
            // External texture rendering. Update texture image to latest and make a deep copy of
            // the external texture.
            // TODO(magjed): Move updateTexImage() to the video source instead.
            final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
            surfaceTexture.updateTexImage();
            final float[] samplingMatrix = new float[16];
            surfaceTexture.getTransformMatrix(samplingMatrix);
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);

            // Reallocate offscreen texture if necessary.
            textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());

            // Bind our offscreen framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
            GlUtil.checkNoGLES2Error("glBindFramebuffer");

            // Copy the OES texture content. This will also normalize the sampling matrix.
            GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
            rotatedSamplingMatrix = RendererCommon.identityMatrix();

            // Restore normal framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
          }
          copyTimeNs += (System.nanoTime() - now);
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }

      // OpenGL defaults to lower left origin - flip vertically.
      GLES20.glViewport(
          displayLayout.left,
          screenHeight - displayLayout.bottom,
          displayLayout.width(),
          displayLayout.height());

      updateLayoutMatrix();
      final float[] texMatrix =
          RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
      if (rendererType == RendererType.RENDERER_YUV) {
        drawer.drawYuv(yuvTextures, texMatrix);
      } else {
        drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
      }

      if (isNewFrame) {
        framesRendered++;
        drawTimeNs += (System.nanoTime() - now);
        if ((framesRendered % 300) == 0) {
          logStatistics();
        }
      }
    }
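
The comments above describe the sampling matrix as a vertical flip (WebRTC's top-left buffer origin vs. glTexImage2D's bottom-left origin) composed with a rotation by frame.rotationDegree. Below is a hedged sketch of how such a matrix can be built with android.opengl.Matrix; the actual RendererCommon helpers may be implemented differently, and SamplingMatrixSketch is a hypothetical name used only for illustration.

import android.opengl.Matrix;

// Illustrative only: not part of WebRTC; shows one way to build flip/rotation matrices.
final class SamplingMatrixSketch {
  // Maps texture coordinates (u, v) to (u, 1 - v): translate up by 1, then negate the v axis.
  static float[] verticalFlipMatrix() {
    final float[] m = new float[16];
    Matrix.setIdentityM(m, 0);
    Matrix.translateM(m, 0, 0f, 1f, 0f);
    Matrix.scaleM(m, 0, 1f, -1f, 1f);
    return m;
  }

  // Rotates texture coordinates by rotationDegree around the texture center (0.5, 0.5),
  // then applies the given matrix (e.g. the vertical flip) on top of that rotation.
  static float[] rotateTextureMatrix(float[] matrix, float rotationDegree) {
    final float[] rotation = new float[16];
    Matrix.setIdentityM(rotation, 0);
    Matrix.translateM(rotation, 0, 0.5f, 0.5f, 0f);
    Matrix.rotateM(rotation, 0, rotationDegree, 0f, 0f, 1f);
    Matrix.translateM(rotation, 0, -0.5f, -0.5f, 0f);

    final float[] result = new float[16];
    Matrix.multiplyMM(result, 0, matrix, 0, rotation, 0);
    return result;
  }
}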