  @Override
  public void onTextureFrameAvailable(
      int oesTextureId, float[] transformMatrix, long timestampNs) {
    checkIsOnCameraThread();
    if (camera == null) {
      // Camera is stopped, we need to return the buffer immediately.
      surfaceHelper.returnTextureFrame();
      return;
    }
    if (dropNextFrame) {
      // This frame was flagged to be dropped (e.g. it was captured before a camera switch
      // completed); return the buffer without forwarding the frame.
      surfaceHelper.returnTextureFrame();
      dropNextFrame = false;
      return;
    }

    int rotation = getFrameOrientation();
    if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
      // Undo the mirror that the OS "helps" us with.
      // http://developer.android.com/reference/android/hardware/Camera.html#setDisplayOrientation(int)
      transformMatrix =
          RendererCommon.multiplyMatrices(transformMatrix, RendererCommon.horizontalFlipMatrix());
    }
    transformMatrix = RendererCommon.rotateTextureMatrix(transformMatrix, rotation);

    final int rotatedWidth = (rotation % 180 == 0) ? captureFormat.width : captureFormat.height;
    final int rotatedHeight = (rotation % 180 == 0) ? captureFormat.height : captureFormat.width;
    cameraStatistics.addPendingFrame(timestampNs);
    frameObserver.onTextureFrameCaptured(
        rotatedWidth, rotatedHeight, oesTextureId, transformMatrix, timestampNs);
  }
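
  // |dropNextFrame| is armed by control paths outside this excerpt that invalidate the frame
  // already queued in the SurfaceTexture, such as a camera switch. A minimal sketch of such an
  // arming site (the method name is hypothetical; it only illustrates the handshake):
  private void armFrameDropOnCameraSwitch() {
    checkIsOnCameraThread();
    // The next frame delivered to onTextureFrameAvailable() was captured by the old camera,
    // so flag it to be returned without being forwarded to |frameObserver|.
    dropNextFrame = true;
  }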
  private VideoCapturerAndroid(
      int cameraId, CameraEventsHandler eventsHandler, EGLContext sharedContext) {
    Logging.d(TAG, "VideoCapturerAndroid");
    this.id = cameraId;
    this.eventsHandler = eventsHandler;
    cameraThread = new HandlerThread(TAG);
    cameraThread.start();
    cameraThreadHandler = new Handler(cameraThread.getLooper());
    videoBuffers = new FramePool(cameraThread);
    isCapturingToTexture = (sharedContext != null);
    cameraStatistics =
        new CameraStatistics(isCapturingToTexture ? 1 : videoBuffers.numCaptureBuffers);
    surfaceHelper =
        SurfaceTextureHelper.create(
            isCapturingToTexture ? sharedContext : EGL10.EGL_NO_CONTEXT, cameraThreadHandler);
    if (isCapturingToTexture) {
      surfaceHelper.setListener(this);
    }
  }
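
  // A brief usage sketch for the private constructor above (illustrative only; in the real
  // class the constructor is reached through factory methods not shown in this excerpt).
  // It highlights how |sharedContext| selects the capture mode:
  public static VideoCapturerAndroid createForTest(
      int cameraId, CameraEventsHandler eventsHandler, EGLContext sharedContext) {
    // A non-null shared EGL context enables capture-to-texture in the constructor above;
    // null selects byte-buffer capture through |videoBuffers|.
    return new VideoCapturerAndroid(cameraId, eventsHandler, sharedContext);
  }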
  /**
   * Test using SurfaceTextureHelper on a separate thread. A uniform texture frame is created and
   * received on a thread separate from the test thread, and returned after disconnect.
   */
  @MediumTest
  public static void testLateReturnFrameOnSeparateThread() throws InterruptedException {
    final HandlerThread thread = new HandlerThread("SurfaceTextureHelperTestThread");
    thread.start();
    final Handler handler = new Handler(thread.getLooper());

    // Create SurfaceTextureHelper and listener.
    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(null, handler);
    // Create a mock listener and expect frames to be delivered on |thread|.
    final MockTextureListener listener = new MockTextureListener(thread);
    surfaceTextureHelper.setListener(listener);

    // Create resources for stubbing an OES texture producer. |eglOesBase| has the
    // SurfaceTexture in |surfaceTextureHelper| as the target EGLSurface.
    final EglBase eglOesBase = EglBase.create(null, EglBase.ConfigType.PLAIN);
    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
    eglOesBase.makeCurrent();
    // Draw a frame onto the SurfaceTexture.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
    eglOesBase.swapBuffers();
    eglOesBase.release();

    // Wait for an OES texture to arrive.
    listener.waitForNewFrame();

    surfaceTextureHelper.disconnect(handler);

    surfaceTextureHelper.returnTextureFrame();
  }
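
  // The tests here rely on |MockTextureListener|, which is not part of this excerpt. Below is a
  // plausible sketch inferred from how it is used (the listener interface name and the thread
  // check are assumptions; the real test helper may differ):
  private static class MockTextureListener
      implements SurfaceTextureHelper.OnTextureFrameAvailableListener {
    public int oesTextureId;
    public float[] transformMatrix;
    private boolean hasNewFrame = false;
    // If non-null, onTextureFrameAvailable() is expected to be called on this thread.
    private final Thread expectedThread;

    MockTextureListener() {
      this.expectedThread = null;
    }

    MockTextureListener(Thread expectedThread) {
      this.expectedThread = expectedThread;
    }

    @Override
    public synchronized void onTextureFrameAvailable(
        int oesTextureId, float[] transformMatrix, long timestampNs) {
      if (expectedThread != null && Thread.currentThread() != expectedThread) {
        throw new IllegalStateException("onTextureFrameAvailable called on wrong thread.");
      }
      this.oesTextureId = oesTextureId;
      this.transformMatrix = transformMatrix;
      hasNewFrame = true;
      notifyAll();
    }

    // Wait indefinitely for a new frame.
    public synchronized void waitForNewFrame() throws InterruptedException {
      while (!hasNewFrame) {
        wait();
      }
      hasNewFrame = false;
    }

    // Wait at most |timeoutMs| for a new frame; return true if one arrived in time.
    public synchronized boolean waitForNewFrame(long timeoutMs) throws InterruptedException {
      final long endTimeMs = System.currentTimeMillis() + timeoutMs;
      while (!hasNewFrame) {
        final long waitTimeMs = endTimeMs - System.currentTimeMillis();
        if (waitTimeMs <= 0) {
          return false;
        }
        wait(waitTimeMs);
      }
      hasNewFrame = false;
      return true;
    }
  }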
  /**
   * Test disconnecting the SurfaceTextureHelper while continuing to produce more texture frames.
   * No frames should be delivered to the listener after the disconnect.
   */
  @MediumTest
  public static void testDisconnect() throws InterruptedException {
    // Create SurfaceTextureHelper and listener.
    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(null);
    final MockTextureListener listener = new MockTextureListener();
    surfaceTextureHelper.setListener(listener);
    // Create EglBase with the SurfaceTexture as target EGLSurface.
    final EglBase eglBase = EglBase.create(null, EglBase.ConfigType.PLAIN);
    eglBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
    eglBase.makeCurrent();
    // Assert no frame has been received yet.
    assertFalse(listener.waitForNewFrame(1));
    // Draw and wait for one frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
    eglBase.swapBuffers();
    listener.waitForNewFrame();
    surfaceTextureHelper.returnTextureFrame();

    // Disconnect - we should not receive any textures after this.
    surfaceTextureHelper.disconnect();

    // Draw one frame.
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    eglBase.swapBuffers();
    // swapBuffers() should not trigger onTextureFrameAvailable() because we are disconnected.
    // Assert that no OES texture was delivered.
    assertFalse(listener.waitForNewFrame(500));

    eglBase.release();
  }
  // Called by native code to quit the camera thread. This needs to be done manually; otherwise
  // the thread and handler will not be garbage collected.
  private void release() {
    Logging.d(TAG, "release");
    if (isReleased()) {
      throw new IllegalStateException("Already released");
    }
    ThreadUtils.invokeUninterruptibly(
        cameraThreadHandler,
        new Runnable() {
          @Override
          public void run() {
            if (camera != null) {
              throw new IllegalStateException("Release called while camera is running");
            }
            if (cameraStatistics.pendingFramesCount() != 0) {
              throw new IllegalStateException("Release called with pending frames left");
            }
          }
        });
    surfaceHelper.disconnect();
    cameraThread.quit();
    ThreadUtils.joinUninterruptibly(cameraThread);
    cameraThread = null;
  }
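
  // |isReleased()| is used above but not included in this excerpt. A minimal sketch consistent
  // with release() nulling out |cameraThread| as its final step:
  private boolean isReleased() {
    return cameraThread == null;
  }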
  private void startCaptureOnCameraThread(
      final int width,
      final int height,
      final int framerate,
      final CapturerObserver frameObserver,
      final Context applicationContext) {
    Throwable error = null;
    checkIsOnCameraThread();
    if (camera != null) {
      throw new RuntimeException("Camera has already been started.");
    }
    this.applicationContext = applicationContext;
    this.frameObserver = frameObserver;
    this.firstFrameReported = false;

    try {
      try {
        synchronized (cameraIdLock) {
          Logging.d(TAG, "Opening camera " + id);
          if (eventsHandler != null) {
            eventsHandler.onCameraOpening(id);
          }
          camera = Camera.open(id);
          info = new Camera.CameraInfo();
          Camera.getCameraInfo(id, info);
        }
      } catch (RuntimeException e) {
        openCameraAttempts++;
        if (openCameraAttempts < MAX_OPEN_CAMERA_ATTEMPTS) {
          Logging.e(TAG, "Camera.open failed, retrying", e);
          openCameraOnCodecThreadRunner =
              new Runnable() {
                @Override
                public void run() {
                  startCaptureOnCameraThread(
                      width, height, framerate, frameObserver, applicationContext);
                }
              };
          cameraThreadHandler.postDelayed(openCameraOnCodecThreadRunner, OPEN_CAMERA_DELAY_MS);
          return;
        }
        openCameraAttempts = 0;
        throw e;
      }

      try {
        camera.setPreviewTexture(surfaceHelper.getSurfaceTexture());
      } catch (IOException e) {
        Logging.e(TAG, "setPreviewTexture failed", error);
        throw new RuntimeException(e);
      }

      Logging.d(
          TAG,
          "Camera orientation: "
              + info.orientation
              + " .Device orientation: "
              + getDeviceOrientation());
      camera.setErrorCallback(cameraErrorCallback);
      startPreviewOnCameraThread(width, height, framerate);
      frameObserver.onCapturerStarted(true);

      // Start camera observer.
      cameraThreadHandler.postDelayed(cameraObserver, CAMERA_OBSERVER_PERIOD_MS);
      return;
    } catch (RuntimeException e) {
      error = e;
    }
    Logging.e(TAG, "startCapture failed", error);
    stopCaptureOnCameraThread();
    frameObserver.onCapturerStarted(false);
    if (eventsHandler != null) {
      eventsHandler.onCameraError("Camera can not be started.");
    }
    return;
  }
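
  // |cameraObserver| is posted above but not shown in this excerpt. A plausible sketch of a
  // periodic watchdog (the CameraStatistics accessor name is an assumption): it checks that
  // frames keep arriving and re-posts itself every CAMERA_OBSERVER_PERIOD_MS.
  private final Runnable cameraObserver = new Runnable() {
    @Override
    public void run() {
      // Hypothetical accessor returning the number of frames seen since the last call.
      final int cameraFramesCount = cameraStatistics.getAndResetFrameCount();
      if (cameraFramesCount == 0 && eventsHandler != null) {
        // No frames arrived during a whole observer period; report a likely camera freeze.
        eventsHandler.onCameraError("Camera failure - no frames received.");
      } else {
        cameraThreadHandler.postDelayed(this, CAMERA_OBSERVER_PERIOD_MS);
      }
    }
  };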
  /**
   * Test disconnecting the SurfaceTextureHelper immediately after it has been set up to use a
   * shared context. No frames should be delivered to the listener.
   */
  @SmallTest
  public static void testDisconnectImmediately() {
    final SurfaceTextureHelper surfaceTextureHelper = SurfaceTextureHelper.create(null);
    surfaceTextureHelper.disconnect();
  }
  /**
   * Test disconnecting the SurfaceTextureHelper while holding a pending texture frame. The pending
   * texture frame should still be valid, and this is tested by drawing the texture frame to a pixel
   * buffer and reading it back with glReadPixels().
   */
  @MediumTest
  public static void testLateReturnFrame() throws InterruptedException {
    final int width = 16;
    final int height = 16;
    // Create EGL base with a pixel buffer as display output.
    final EglBase eglBase = EglBase.create(null, EglBase.ConfigType.PIXEL_BUFFER);
    eglBase.createPbufferSurface(width, height);

    // Create SurfaceTextureHelper and listener.
    final SurfaceTextureHelper surfaceTextureHelper =
        SurfaceTextureHelper.create(eglBase.getEglBaseContext());
    final MockTextureListener listener = new MockTextureListener();
    surfaceTextureHelper.setListener(listener);
    surfaceTextureHelper.getSurfaceTexture().setDefaultBufferSize(width, height);

    // Create resources for stubbing an OES texture producer. |eglOesBase| has the SurfaceTexture in
    // |surfaceTextureHelper| as the target EGLSurface.
    final EglBase eglOesBase =
        EglBase.create(eglBase.getEglBaseContext(), EglBase.ConfigType.PLAIN);
    eglOesBase.createSurface(surfaceTextureHelper.getSurfaceTexture());
    assertEquals(width, eglOesBase.surfaceWidth());
    assertEquals(height, eglOesBase.surfaceHeight());

    final int red = 79;
    final int green = 66;
    final int blue = 161;
    // Draw a constant color frame onto the SurfaceTexture.
    eglOesBase.makeCurrent();
    GLES20.glClearColor(red / 255.0f, green / 255.0f, blue / 255.0f, 1.0f);
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
    // swapBuffers() will ultimately trigger onTextureFrameAvailable().
    eglOesBase.swapBuffers();
    eglOesBase.release();

    // Wait for OES texture frame.
    listener.waitForNewFrame();
    // Disconnect while holding the frame.
    surfaceTextureHelper.disconnect();

    // Draw the pending texture frame onto the pixel buffer.
    eglBase.makeCurrent();
    final GlRectDrawer drawer = new GlRectDrawer();
    drawer.drawOes(listener.oesTextureId, listener.transformMatrix);
    drawer.release();

    // Download the pixels in the pixel buffer as RGBA. Not all platforms support RGB, e.g. Nexus 9.
    final ByteBuffer rgbaData = ByteBuffer.allocateDirect(width * height * 4);
    GLES20.glReadPixels(0, 0, width, height, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, rgbaData);
    GlUtil.checkNoGLES2Error("glReadPixels");
    eglBase.release();

    // Assert rendered image is expected constant color.
    while (rgbaData.hasRemaining()) {
      assertEquals(rgbaData.get() & 0xFF, red);
      assertEquals(rgbaData.get() & 0xFF, green);
      assertEquals(rgbaData.get() & 0xFF, blue);
      assertEquals(rgbaData.get() & 0xFF, 255);
    }
    // Late frame return after everything has been disconnected and released.
    surfaceTextureHelper.returnTextureFrame();
  }