Example #1
    @Override
    public synchronized void renderFrame(I420Frame frame) {
      if (surface == null) {
        // This object has been released.
        VideoRenderer.renderFrameDone(frame);
        return;
      }
      if (!seenFrame && rendererEvents != null) {
        Logging.d(TAG, "ID: " + id + ". Reporting first rendered frame.");
        rendererEvents.onFirstFrameRendered();
      }
      framesReceived++;
      synchronized (pendingFrameLock) {
        // Check input frame parameters.
        if (frame.yuvFrame) {
          if (frame.yuvStrides[0] < frame.width
              || frame.yuvStrides[1] < frame.width / 2
              || frame.yuvStrides[2] < frame.width / 2) {
            Logging.e(
                TAG,
                "Incorrect strides "
                    + frame.yuvStrides[0]
                    + ", "
                    + frame.yuvStrides[1]
                    + ", "
                    + frame.yuvStrides[2]);
            VideoRenderer.renderFrameDone(frame);
            return;
          }
        }

        if (pendingFrame != null) {
          // Skip rendering of this frame if the previous frame has not been rendered yet.
          framesDropped++;
          VideoRenderer.renderFrameDone(frame);
          return;
        }
        pendingFrame = frame;
      }
      setSize(frame.width, frame.height, frame.rotationDegree);
      seenFrame = true;

      // Request rendering.
      surface.requestRender();
    }
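The pattern worth noting here is the single-slot hand-off guarded by pendingFrameLock: at most one frame waits for the render thread, and a producer that arrives while the slot is full drops its frame (counting framesDropped) instead of queueing it. Below is a minimal generic sketch of the same idea; FrameSlot, offer and take are illustrative names, not part of the WebRTC API, and the caller stays responsible for recycling rejected frames the way VideoRenderer.renderFrameDone() does above.

final class FrameSlot<F> {
  private final Object lock = new Object();
  private F pending; // at most one frame waits for the render thread

  /** Returns false (frame dropped) if the previous frame is still unrendered. */
  boolean offer(F frame) {
    synchronized (lock) {
      if (pending != null) {
        return false; // caller must recycle the frame, as renderFrameDone() does above
      }
      pending = frame;
      return true;
    }
  }

  /** Called on the render thread; returns null when no new frame has arrived. */
  F take() {
    synchronized (lock) {
      final F frame = pending;
      pending = null;
      return frame;
    }
  }
}

Dropping at the producer keeps the renderer at most one frame behind the source, which is usually the right trade-off for live video: a stale frame is worth less than a current one.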
Example #2
 @Override
 public void onDestroy() {
   Log.d(TAG, "VideoEngine onDestroy()");
   super.onDestroy();
   if (renderer != null) {
     renderer.release();
   }
   renderer = null;
 }
Example #3
 private synchronized void release() {
   surface = null;
   synchronized (pendingFrameLock) {
     if (pendingFrame != null) {
       VideoRenderer.renderFrameDone(pendingFrame);
       pendingFrame = null;
     }
   }
 }
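Examples #2 and #3 are two halves of the same teardown: onDestroy() releases the renderer before dropping its reference, and release() returns any still-queued frame through renderFrameDone(), since these frames wrap natively owned buffers that are only reclaimed once they are handed back. Note also that renderFrame() and release() are both synchronized methods that take pendingFrameLock second, so the lock acquisition order is consistent and the pair cannot deadlock. A stripped-down sketch of that two-lock discipline, with placeholder names and types:

final class TwoLockSlot {
  private final Object pendingLock = new Object();
  private Object surface = new Object(); // placeholder for the GL surface
  private Object pending;

  synchronized void put(Object frame) { // outer lock: the object monitor
    if (surface == null) {
      return; // already released; drop the frame
    }
    synchronized (pendingLock) { // inner lock, always taken second
      pending = frame;
    }
  }

  synchronized void release() { // same order: object monitor, then pendingLock
    surface = null;
    synchronized (pendingLock) {
      pending = null; // a real implementation would recycle the frame here
    }
  }
}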
Example #4
    private void draw(GlRectDrawer drawer) {
      if (!seenFrame) {
        // No frame received yet - nothing to render.
        return;
      }
      long now = System.nanoTime();

      final boolean isNewFrame;
      synchronized (pendingFrameLock) {
        isNewFrame = (pendingFrame != null);
        if (isNewFrame && startTimeNs == -1) {
          startTimeNs = now;
        }

        if (isNewFrame) {
          if (pendingFrame.yuvFrame) {
            rendererType = RendererType.RENDERER_YUV;
            drawer.uploadYuvData(
                yuvTextures,
                pendingFrame.width,
                pendingFrame.height,
                pendingFrame.yuvStrides,
                pendingFrame.yuvPlanes);
            // The convention in WebRTC is that the first element in a ByteBuffer corresponds to
            // the top-left corner of the image, but in glTexImage2D() the first element
            // corresponds to the bottom-left corner. We correct this discrepancy by using a
            // vertical flip as the sampling matrix.
            final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
          } else {
            rendererType = RendererType.RENDERER_TEXTURE;
            // External texture rendering. Update texture image to latest and make a deep copy of
            // the external texture.
            // TODO(magjed): Move updateTexImage() to the video source instead.
            final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
            surfaceTexture.updateTexImage();
            final float[] samplingMatrix = new float[16];
            surfaceTexture.getTransformMatrix(samplingMatrix);
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);

            // Reallocate offscreen texture if necessary.
            textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());

            // Bind our offscreen framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
            GlUtil.checkNoGLES2Error("glBindFramebuffer");

            // Copy the OES texture content. This will also normalize the sampling matrix.
            GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
            rotatedSamplingMatrix = RendererCommon.identityMatrix();

            // Restore normal framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
          }
          copyTimeNs += (System.nanoTime() - now);
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }

      // OpenGL defaults to lower left origin - flip vertically.
      GLES20.glViewport(
          displayLayout.left,
          screenHeight - displayLayout.bottom,
          displayLayout.width(),
          displayLayout.height());

      updateLayoutMatrix();
      final float[] texMatrix =
          RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
      if (rendererType == RendererType.RENDERER_YUV) {
        drawer.drawYuv(yuvTextures, texMatrix);
      } else {
        drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
      }

      if (isNewFrame) {
        framesRendered++;
        drawTimeNs += (System.nanoTime() - now);
        if ((framesRendered % 300) == 0) {
          logStatistics();
        }
      }
    }
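The subtle part of draw() is the sampling-matrix handling: YUV frames get a vertical flip because WebRTC buffers start at the top-left while glTexImage2D() addresses textures from the bottom-left, OES frames take their transform from SurfaceTexture.getTransformMatrix(), and either result is rotated by the frame's rotationDegree before being multiplied with the layout matrix. The sketch below reconstructs that matrix math with the standard android.opengl.Matrix API; the class and method names are illustrative, a plausible reading of what verticalFlipMatrix() produces rather than the WebRTC sources themselves.

import android.opengl.Matrix;

public final class SamplingMatrixSketch {
  /** Texture matrix that flips the v coordinate: (u, v) -> (u, 1 - v). */
  public static float[] verticalFlip() {
    final float[] m = new float[16];
    Matrix.setIdentityM(m, 0);
    Matrix.translateM(m, 0, 0f, 1f, 0f); // shift v up by 1 ...
    Matrix.scaleM(m, 0, 1f, -1f, 1f);    // ... after negating it: v' = 1 - v
    return m;
  }

  /** Composition step matching multiplyMatrices(rotatedSamplingMatrix, layoutMatrix). */
  public static float[] compose(float[] sampling, float[] layout) {
    final float[] result = new float[16];
    Matrix.multiplyMM(result, 0, sampling, 0, layout, 0);
    return result;
  }
}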
Example #5
  /**
   * Opens the player: initializes the H.264 encoder and prepares the RTP session.
   *
   * @param remoteHost Remote RTP host
   * @param remotePort Remote RTP port
   */
  public void open(String remoteHost, int remotePort) {
    if (opened) {
      // Already opened
      return;
    }

    // Check video codec
    if (selectedVideoCodec == null) {
      notifyPlayerEventError("Video codec not selected");
      return;
    }

    // Init video encoder
    try {
      NativeH264EncoderParams nativeH264EncoderParams = new NativeH264EncoderParams();

      // Codec dimensions
      nativeH264EncoderParams.setFrameWidth(selectedVideoCodec.getWidth());
      nativeH264EncoderParams.setFrameHeight(selectedVideoCodec.getHeight());
      nativeH264EncoderParams.setFrameRate(selectedVideoCodec.getFramerate());
      nativeH264EncoderParams.setBitRate(selectedVideoCodec.getBitrate());

      // Codec profile and level
      nativeH264EncoderParams.setProfilesAndLevel(selectedVideoCodec.getCodecParams());

      // Codec settings optimization
      nativeH264EncoderParams.setEncMode(NativeH264EncoderParams.ENCODING_MODE_STREAMING);
      nativeH264EncoderParams.setSceneDetection(false);

      if (logger.isActivated()) {
        logger.info(
            "Init H264Encoder "
                + selectedVideoCodec.getCodecParams()
                + " "
                + selectedVideoCodec.getWidth()
                + "x"
                + selectedVideoCodec.getHeight()
                + " "
                + selectedVideoCodec.getFramerate()
                + " "
                + selectedVideoCodec.getBitrate());
      }
      int result = NativeH264Encoder.InitEncoder(nativeH264EncoderParams);
      if (result != 0) {
        notifyPlayerEventError("Encoder init failed with error code " + result);
        return;
      }
    } catch (UnsatisfiedLinkError e) {
      notifyPlayerEventError(e.getMessage());
      return;
    }

    // Init the RTP layer
    try {
      releasePort();
      rtpSender = new VideoRtpSender(videoFormat, localRtpPort);
      rtpInput = new MediaRtpInput();
      rtpInput.open();
      if (videoRenderer != null) {
        // The video renderer is expected to be open already, so we reuse its RTP stream
        if (logger.isActivated()) {
          logger.debug("Player shares the renderer RTP stream");
        }
        rtpSender.prepareSession(
            rtpInput, remoteHost, remotePort, videoRenderer.getRtpInputStream(), this);
      } else {
        // No video renderer exists, so we create a new RTP stream
        rtpSender.prepareSession(rtpInput, remoteHost, remotePort, this);
      }

    } catch (Exception e) {
      notifyPlayerEventError(e.getMessage());
      return;
    }

    // Player is opened
    opened = true;
    notifyPlayerEventOpened();
  }
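The control flow of open() is worth noting: every validation and initialization failure exits through notifyPlayerEventError() while opened stays false, so a failed open() can simply be retried, and opened is only set once both the encoder and the RTP session are up. A condensed sketch of that shape, where MediaEndpoint and EventSink are hypothetical stand-ins for the player's real API:

final class MediaEndpoint {
  interface EventSink {
    void onOpened();
    void onError(String reason);
  }

  private final EventSink events;
  private boolean opened;

  MediaEndpoint(EventSink events) { this.events = events; }

  void open(String remoteHost, int remotePort) {
    if (opened) {
      return; // idempotent: a second open() is a no-op
    }
    try {
      initCodec(); // stands in for NativeH264Encoder.InitEncoder(...)
      initTransport(remoteHost, remotePort); // stands in for the RTP setup
    } catch (Exception e) {
      events.onError(e.getMessage()); // mirrors notifyPlayerEventError(...)
      return;
    }
    opened = true; // only set after every step succeeded
    events.onOpened();
  }

  private void initCodec() { /* codec parameters would be applied here */ }
  private void initTransport(String host, int port) { /* RTP session setup */ }
}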