Example #1
  @SuppressLint("NewApi")
  @Override
  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    Logging.d(TAG, "VideoRendererGui.onSurfaceCreated");
    // Store render EGL context.
    if (CURRENT_SDK_VERSION >= EGL14_SDK_VERSION) {
      synchronized (VideoRendererGui.class) {
        eglContext = EGL14.eglGetCurrentContext();
        Logging.d(TAG, "VideoRendererGui EGL Context: " + eglContext);
      }
    }

    synchronized (yuvImageRenderers) {
      // Create drawer for YUV/OES frames.
      drawer = new GlRectDrawer();
      // Create textures for all images.
      for (YuvImageRenderer yuvImageRenderer : yuvImageRenderers) {
        yuvImageRenderer.createTextures();
      }
      onSurfaceCreatedCalled = true;
    }
    GlUtil.checkNoGLES2Error("onSurfaceCreated done");
    GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
    GLES20.glClearColor(0.15f, 0.15f, 0.15f, 1.0f);

    // Fire EGL context ready event.
    synchronized (VideoRendererGui.class) {
      if (eglContextReady != null) {
        eglContextReady.run();
      }
    }
  }
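The method above calls GlUtil.checkNoGLES2Error() to fail fast if any GL call went wrong. The helper itself is not shown in these examples; a minimal sketch of the usual pattern, assuming the standard glGetError() convention (the real GlUtil implementation may differ):

  // Sketch only: throw if a GLES20 error is pending.
  public static void checkNoGLES2Error(String msg) {
    // glGetError() returns one pending error flag per call and clears it.
    int error = GLES20.glGetError();
    if (error != GLES20.GL_NO_ERROR) {
      throw new RuntimeException(msg + ": GLES20 error: " + error);
    }
  }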
Example #2
 /**
  * Creates a test texture in the current GL context.
  *
  * <p>This follows image conventions, so the pixel data at offset zero is intended to appear in
  * the top-left corner. Color values for non-opaque alpha will be pre-multiplied.
  *
  * @return Handle to texture.
  */
 public static int createTestTexture(Image which) {
   ByteBuffer buf;
   switch (which) {
     case COARSE:
       buf = sCoarseImageData;
       break;
     case FINE:
       buf = sFineImageData;
       break;
     default:
       throw new RuntimeException("unknown image");
   }
   return GlUtil.createImageTexture(buf, TEX_SIZE, TEX_SIZE, FORMAT);
 }
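createTestTexture() delegates the upload to GlUtil.createImageTexture(buf, width, height, format), which is not shown here. A sketch of what such a helper typically does, with the signature inferred from the call above and everything else assumed:

 // Sketch: allocate a single-level 2D texture and upload the pixel data.
 public static int createImageTexture(ByteBuffer data, int width, int height, int format) {
   int[] textureHandles = new int[1];
   GLES20.glGenTextures(1, textureHandles, 0);
   int textureHandle = textureHandles[0];
   GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, textureHandle);
   // Linear filtering; no mipmaps are generated for this one-level texture.
   GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
   GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
   // In GLES 2.0 the internal format must match the pixel format.
   GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, format, width, height, 0,
       format, GLES20.GL_UNSIGNED_BYTE, data);
   return textureHandle;
 }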
Example #3
    private void createTextures() {
      Logging.d(
          TAG,
          "  YuvImageRenderer.createTextures "
              + id
              + " on GL thread:"
              + Thread.currentThread().getId());

      // Generate 3 texture ids for Y/U/V and place them into |yuvTextures|.
      for (int i = 0; i < 3; i++) {
        yuvTextures[i] = GlUtil.generateTexture(GLES20.GL_TEXTURE_2D);
      }
      // Generate texture and framebuffer for offscreen texture copy.
      textureCopy = new GlTextureFrameBuffer(GLES20.GL_RGB);
    }
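Both createTextures() here and the decoder in Example #5 rely on GlUtil.generateTexture(target). A plausible sketch, assuming the common defaults of linear filtering and clamp-to-edge wrapping:

    // Sketch: generate one texture id for |target| and set safe default parameters.
    public static int generateTexture(int target) {
      final int[] textureArray = new int[1];
      GLES20.glGenTextures(1, textureArray, 0);
      final int textureId = textureArray[0];
      GLES20.glBindTexture(target, textureId);
      GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
      GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
      // Clamp to edge is required for NPOT and external OES textures.
      GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
      GLES20.glTexParameterf(target, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
      return textureId;
    }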
Example #4
  /**
   * Saves the EGL surface to a file.
   *
   * <p>Expects that this object's EGL surface is current.
   */
  public void saveFrame(File file) throws IOException {
    if (!mEglCore.isCurrent(mEGLSurface)) {
      throw new RuntimeException("Expected EGL context/surface is not current");
    }

    // glReadPixels fills in a "direct" ByteBuffer with what is essentially big-endian RGBA
    // data (i.e. a byte of red, followed by a byte of green...).  While the Bitmap
    // constructor that takes an int[] wants little-endian ARGB (blue/red swapped), the
    // Bitmap "copy pixels" method wants the same format GL provides.
    //
    // Ideally we'd have some way to re-use the ByteBuffer, especially if we're calling
    // here often.
    //
    // Making this even more interesting is the upside-down nature of GL, which means
    // our output will look upside down relative to what appears on screen if the
    // typical GL conventions are used.

    String filename = file.toString();

    int width = getWidth();
    int height = getHeight();
    ByteBuffer buf = ByteBuffer.allocateDirect(width * height * 4);
    buf.order(ByteOrder.LITTLE_ENDIAN);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, buf);
    GlUtil.checkGlError("glReadPixels");
    buf.rewind();

    BufferedOutputStream bos = null;
    try {
      bos = new BufferedOutputStream(new FileOutputStream(filename));
      Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
      bmp.copyPixelsFromBuffer(buf);
      bmp.compress(Bitmap.CompressFormat.PNG, 90, bos);
      bmp.recycle();
    } finally {
      if (bos != null) bos.close();
    }
    Log.d(TAG, "Saved " + width + "x" + height + " frame as '" + filename + "'");
  }
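As the long comment in saveFrame() explains, glReadPixels() fills the buffer bottom row first, so the PNG comes out vertically mirrored relative to the screen. If an upright file is needed, one option (a sketch, not part of the original code) is to flip the Bitmap before compressing it:

  // Sketch: reuses width/height/buf from saveFrame() above and flips the image
  // with a negative y scale; costs one extra full-frame Bitmap copy.
  Bitmap bmp = Bitmap.createBitmap(width, height, Bitmap.Config.ARGB_8888);
  bmp.copyPixelsFromBuffer(buf);
  android.graphics.Matrix flip = new android.graphics.Matrix();
  flip.postScale(1.0f, -1.0f);  // mirror across the horizontal axis
  Bitmap upright = Bitmap.createBitmap(bmp, 0, 0, width, height, flip, false);
  bmp.recycle();
  upright.compress(Bitmap.CompressFormat.PNG, 90, bos);
  upright.recycle();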
Example #5
  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
    if (mediaCodecThread != null) {
      throw new RuntimeException("Forgot to release()?");
    }
    useSurface = (sharedContext != null);
    String mime = null;
    String[] supportedCodecPrefixes = null;
    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
      mime = VP8_MIME_TYPE;
      supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
      mime = H264_MIME_TYPE;
      supportedCodecPrefixes = supportedH264HwCodecPrefixes;
    } else {
      throw new RuntimeException("Non supported codec " + type);
    }
    DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
    if (properties == null) {
      throw new RuntimeException("Cannot find HW decoder for " + type);
    }
    Logging.d(
        TAG,
        "Java initDecode: "
            + type
            + " : "
            + width
            + " x "
            + height
            + ". Color: 0x"
            + Integer.toHexString(properties.colorFormat)
            + ". Use Surface: "
            + useSurface);
    if (sharedContext != null) {
      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
    }
    mediaCodecThread = Thread.currentThread();
    try {
      Surface decodeSurface = null;
      this.width = width;
      this.height = height;
      stride = width;
      sliceHeight = height;

      if (useSurface) {
        // Create shared EGL context.
        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
        eglBase.createDummyPbufferSurface();
        eglBase.makeCurrent();

        // Create output surface.
        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
        Logging.d(TAG, "Video decoder TextureID = " + textureID);
        surfaceTexture = new SurfaceTexture(textureID);
        surface = new Surface(surfaceTexture);
        decodeSurface = surface;
      }

      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
      if (!useSurface) {
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
      }
      Logging.d(TAG, "  Format: " + format);
      // Reuses the encoder class's static createByCodecName() helper, which
      // returns null on failure (handled by the check below).
      mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
      if (mediaCodec == null) {
        return false;
      }
      mediaCodec.configure(format, decodeSurface, null, 0);
      mediaCodec.start();
      colorFormat = properties.colorFormat;
      outputBuffers = mediaCodec.getOutputBuffers();
      inputBuffers = mediaCodec.getInputBuffers();
      Logging.d(
          TAG,
          "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
      return true;
    } catch (IllegalStateException e) {
      Logging.e(TAG, "initDecode failed", e);
      return false;
    }
  }
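initDecode() depends on findDecoder() to map a mime type onto a usable hardware codec. A rough sketch of how such a lookup typically walks MediaCodecList (assuming android.media.MediaCodecInfo/MediaCodecList imports); the DecoderProperties constructor and the first-color-format policy are assumptions:

  private static DecoderProperties findDecoder(String mime, String[] supportedCodecPrefixes) {
    for (int i = 0; i < MediaCodecList.getCodecCount(); ++i) {
      MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
      if (info.isEncoder()) {
        continue;  // Decoders only.
      }
      String name = null;
      for (String mimeType : info.getSupportedTypes()) {
        if (mimeType.equals(mime)) {
          name = info.getName();
          break;
        }
      }
      if (name == null) {
        continue;  // This codec does not handle the requested mime type.
      }
      for (String prefix : supportedCodecPrefixes) {
        if (name.startsWith(prefix)) {
          // Take the first advertised color format; a real implementation would
          // intersect it with the formats the caller knows how to convert.
          MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(mime);
          if (caps.colorFormats.length > 0) {
            return new DecoderProperties(name, caps.colorFormats[0]);
          }
        }
      }
    }
    return null;  // No matching hardware decoder found.
  }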
Example #6
  /**
   * Test disconnecting the SurfaceTextureHelper while holding a pending texture frame. The pending
   * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
   * buffer and reading it back with glReadPixels().
   */
  @MediumTest
  public static void testLateReturnFrame() throws InterruptedException {
    final int width = 16;
    final int height = 16;
    // Create EGL base with a pixel buffer as display output.
    final EglBase eglBase = EglBase.create(null, EglBase.ConfigType.PIXEL_BUFFER);
    eglBase.createPbufferSurface(width, height);

    // Create SurfaceTextureHelper and listener.
    final SurfaceTextureHelper surfaceTextureHelper =
        SurfaceTextureHelper.create(eglBase.getEglBaseContext());
    final MockTextureListener listener = new MockTextureListener();
    surfaceTextureHelper.setListener(listener);
    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);

    // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
    // |surfaceTextureHelper| as the target EGLSurface.
    final EglBase eglOesBase =
        EglBase.create(eglBase.getEglBaseContext(), EglBase.ConfigType.PLAIN);
    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
    assertEquals(eglOesBase.surfaceWidth(), width);
    assertEquals(eglOesBase.surfaceHeight(), height);

    final int red = 79;
    final int green = 66;
    final int blue = 161;
    // Draw a constant color frame onto the SurfaceTexture.
    eglOesBase.makeCurrent();
    GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
    eglOesBase.swapBuffers();
    eglOesBase.release();

    // Wait for OES texture frame.
    listener.waitForNewFrame();
    // Disconnect while holding the frame.
    surfaceTextureHelper.disconnect();

    // Draw the pending texture frame onto the pixel buffer.
    eglBase.makeCurrent();
    final GlRectDrawer drawer = new GlRectDrawer();
    drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
    drawer.release();

    // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
    final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
    GlUtil.checkNoGLES2Error("glReadPixels");
    eglBase.release();

    // Assert rendered image is expected constant color.
    while (rgbaData.hasRemaining()) {
      assertEquals(rgbaData.get() & 0xFF, red);
      assertEquals(rgbaData.get() & 0xFF, green);
      assertEquals(rgbaData.get() & 0xFF, blue);
      assertEquals(rgbaData.get() & 0xFF, 255);
    }
    // Late frame return after everything has been disconnected and released.
    surfaceTextureHelper.returnTextureFrame();
  }
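The test above relies on a MockTextureListener whose shape can be inferred from the calls it receives: waitForNewFrame() blocks until a frame arrives, and oesTextureId/transformMatrix hold the latest frame. A minimal sketch; the listener interface name and field layout are assumptions:

  private static class MockTextureListener
      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
    public int oesTextureId;
    public float[] transformMatrix;
    private boolean hasNewFrame = false;

    @Override
    public synchronized void onTextureFrameAvailable(
        int oesTextureId, float[] transformMatrix, long timestampNs) {
      this.oesTextureId = oesTextureId;
      this.transformMatrix = transformMatrix;
      hasNewFrame = true;
      notifyAll();  // Wake up the test thread.
    }

    // Block the calling thread until onTextureFrameAvailable() has fired.
    public synchronized void waitForNewFrame() throws InterruptedException {
      while (!hasNewFrame) {
        wait();
      }
      hasNewFrame = false;
    }
  }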
Example #7
    private void draw(GlRectDrawer drawer) {
      if (!seenFrame) {
        // No frame received yet - nothing to render.
        return;
      }
      long now = System.nanoTime();

      final boolean isNewFrame;
      synchronized (pendingFrameLock) {
        isNewFrame = (pendingFrame != null);
        if (isNewFrame && startTimeNs == -1) {
          startTimeNs = now;
        }

        if (isNewFrame) {
          if (pendingFrame.yuvFrame) {
            rendererType = RendererType.RENDERER_YUV;
            drawer.uploadYuvData(
                yuvTextures,
                pendingFrame.width,
                pendingFrame.height,
                pendingFrame.yuvStrides,
                pendingFrame.yuvPlanes);
            // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
            // top-left corner of the image, but in glTexImage2D() the first element corresponds to
            // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
            // the sampling matrix.
            final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
          } else {
            rendererType = RendererType.RENDERER_TEXTURE;
            // External texture rendering. Update texture image to latest and make a deep copy of
            // the external texture.
            // TODO(magjed): Move updateTexImage() to the video source instead.
            final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
            surfaceTexture.updateTexImage();
            final float[] samplingMatrix = new float[16];
            surfaceTexture.getTransformMatrix(samplingMatrix);
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);

            // Reallocate offscreen texture if necessary.
            textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());

            // Bind our offscreen framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
            GlUtil.checkNoGLES2Error("glBindFramebuffer");

            // Copy the OES texture content. This will also normalize the sampling matrix.
            GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
            rotatedSamplingMatrix = RendererCommon.identityMatrix();

            // Restore normal framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
          }
          copyTimeNs += (System.nanoTime() - now);
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }

      // OpenGL defaults to lower left origin - flip vertically.
      GLES20.glViewport(
          displayLayout.left,
          screenHeight - displayLayout.bottom,
          displayLayout.width(),
          displayLayout.height());

      updateLayoutMatrix();
      final float[] texMatrix =
          RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
      if (rendererType == RendererType.RENDERER_YUV) {
        drawer.drawYuv(yuvTextures, texMatrix);
      } else {
        drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
      }

      if (isNewFrame) {
        framesRendered++;
        drawTimeNs += (System.nanoTime() - now);
        if ((framesRendered % 300) == 0) {
          logStatistics();
        }
      }
    }
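The YUV branch above corrects for OpenGL's bottom-left origin by using RendererCommon.verticalFlipMatrix() as the sampling matrix. In column-major 4x4 form, such a matrix simply maps the y texture coordinate to 1 - y; a sketch of what it plausibly returns:

    // Sketch: column-major matrix mapping texture coordinate y to 1 - y.
    public static float[] verticalFlipMatrix() {
      return new float[] {
          1,  0, 0, 0,
          0, -1, 0, 0,
          0,  0, 1, 0,
          0,  1, 0, 1};
    }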