예제 #1
0
 /**
  * TextureView callback: lazily opens the default camera the first time the preview
  * surface becomes available, configures preview size and display orientation, and
  * starts the preview with an appropriate focus mode.
  *
  * <p>NOTE(review): this is decompiled, obfuscated code. {@code a} is presumably the
  * enclosing VideoCaptureFragment and {@code h/i/j} look like synthetic field
  * accessors; {@code width}, {@code height}, {@code videoFrameWidth} and
  * {@code videoFrameHeight} must resolve in an enclosing scope not visible here —
  * verify against the original source. The reuse of {@code paramSurfaceTexture} to
  * hold a List after the getSupportedFocusModes() call is a decompiler artifact and
  * would not compile as written.
  */
 public void onSurfaceTextureAvailable(
     SurfaceTexture paramSurfaceTexture, int paramInt1, int paramInt2) {
   // Initialize only once: h(a) appears to read the fragment's Camera field.
   if (VideoCaptureFragment.h(a) == null) {
     VideoCaptureFragment.a(a, Camera.open());
     Object localObject1 = VideoCaptureFragment.h(a).getParameters();
     Object localObject2 = VideoCaptureFragment.i(a);
     Camera localCamera = VideoCaptureFragment.h(a);
     localCamera.getClass();
     // Camera.Size is a non-static inner class, hence the explicit outer instance.
     localObject2 = new Camera.Size(localCamera, videoFrameWidth, videoFrameHeight);
     // Apply the requested preview size only if the hardware supports it.
     if (((Camera.Parameters) localObject1).getSupportedPreviewSizes().contains(localObject2)) {
       ((Camera.Parameters) localObject1).setPreviewSize(width, height);
       VideoCaptureFragment.h(a).setParameters((Camera.Parameters) localObject1);
     }
     localObject1 = ((Camera.Parameters) localObject1).getPreviewSize();
     VideoCaptureFragment.a(a, width, height);
     VideoCaptureFragment.h(a).setDisplayOrientation(VideoCaptureFragment.j(a));
     try {
       VideoCaptureFragment.h(a).setPreviewTexture(paramSurfaceTexture);
       VideoCaptureFragment.h(a).startPreview();
       // Decompiler reuses paramSurfaceTexture here to hold the focus-mode list.
       paramSurfaceTexture = VideoCaptureFragment.h(a).getParameters().getSupportedFocusModes();
       // If CameraWrangler.a(...) returns true (its semantics are opaque here),
       // kick off a one-shot autofocus instead of setting a focus mode.
       if (CameraWrangler.a(VideoCaptureFragment.h(a))) {
         VideoCaptureFragment.h(a).autoFocus(null);
         return;
       }
       // Otherwise fall back to continuous video focus when available.
       if (paramSurfaceTexture.contains("continuous-video")) {
         paramSurfaceTexture = VideoCaptureFragment.h(a).getParameters();
         paramSurfaceTexture.setFocusMode("continuous-video");
         VideoCaptureFragment.h(a).setParameters(paramSurfaceTexture);
         return;
       }
     } catch (IOException paramSurfaceTexture) {
       // Best-effort: preview start failures are logged, not rethrown.
       YelpLog.error(paramSurfaceTexture);
     }
   }
 }
  /**
   * Prepares OpenGL ES before we draw a frame: clears the color and depth buffers
   * and latches the most recent camera image into the external texture.
   *
   * <p>The large block of commented-out shader/matrix setup that used to live here
   * was dead code and has been removed.
   *
   * @param headTransform The head transformation in the new frame (currently unused).
   */
  @Override
  public void onNewFrame(HeadTransform headTransform) {
    // getTransformMatrix fills a scratch matrix that is not consumed here;
    // it is called immediately after updateTexImage, which produced it.
    float[] mtx = new float[16];
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    surface.updateTexImage();
    surface.getTransformMatrix(mtx);
  }
예제 #3
0
 /**
  * Called at the start of every frame: clears the framebuffer and pulls the
  * latest image from the streaming surface texture.
  *
  * @param headTransform The head transformation in the new frame.
  */
 @Override
 public void onNewFrame(HeadTransform headTransform) {
   // Scratch matrix for the texture transform; not consumed by this sample.
   final float[] texMatrix = new float[16];
   GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
   surface.updateTexImage();
   surface.getTransformMatrix(texMatrix);
 }
  /**
   * Generates an external-OES GL texture, wraps it in a {@link SurfaceTexture}, and
   * registers a frame-available listener that raises the {@code textureUpdated} flag.
   *
   * <p>Removed a line of commented-out dead code (glPixelStorei) and switched the
   * array declaration to the idiomatic {@code int[]} form.
   *
   * @return a new SurfaceTexture backed by the freshly created texture name
   */
  protected SurfaceTexture createSurfaceTexture() {
    int[] tex = new int[1];
    GLES20.glGenTextures(1, tex, 0);
    // External textures do not support mipmaps; clamp-to-edge + linear filtering
    // is the standard configuration for camera/video streams.
    GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, tex[0]);
    GLES20.glTexParameteri(
        GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(
        GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);
    GLES20.glTexParameteri(GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_LINEAR);
    textureName = tex[0];
    Log.d("OESTexture", "createSurfaceTexture textureName:" + textureName);
    final SurfaceTexture t = new SurfaceTexture(tex[0]);
    t.setOnFrameAvailableListener(
        new SurfaceTexture.OnFrameAvailableListener() {

          @Override
          public void onFrameAvailable(SurfaceTexture surfaceTexture) {
            // Only set a flag here; the GL thread consumes the frame later
            // via updateTexImage().
            textureUpdated = true;
          }
        });
    return t;
  }
예제 #5
0
  /**
   * Creates a new {@link CameraCaptureSession} for camera preview: sizes the
   * TextureView's buffer to the chosen preview size, builds a TEMPLATE_PREVIEW
   * request targeting that surface, and starts a repeating request with continuous
   * autofocus and auto-flash once the session is configured.
   *
   * <p>NOTE(review): the {@code assert texture != null} is a no-op unless the VM
   * runs with assertions enabled, and the {@code printStackTrace()} calls should
   * ideally be proper logging — flagged, not changed here.
   */
  @TargetApi(CAMERA_2_API_LIMIT)
  private void createCameraPreviewSession() {
    try {
      SurfaceTexture texture = mTextureView.getSurfaceTexture();
      assert texture != null;

      // We configure the size of default buffer to be the size of camera preview we want.
      texture.setDefaultBufferSize(mPreviewSize.getWidth(), mPreviewSize.getHeight());

      // This is the output Surface we need to start preview.
      Surface surface = new Surface(texture);

      // We set up a CaptureRequest.Builder with the output Surface.
      mCamera2CaptureRequestBuilder =
          mCamera2Device.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
      mCamera2CaptureRequestBuilder.addTarget(surface);

      // Here, we create a CameraCaptureSession for camera preview.
      // The ImageReader surface is registered up front so still captures can
      // reuse this session without reconfiguring.
      mCamera2Device.createCaptureSession(
          Arrays.asList(surface, mImageReader.getSurface()),
          new CameraCaptureSession.StateCallback() {

            @Override
            public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
              // The camera is already closed
              if (null == mCamera2Device) {
                return;
              }

              // When the session is ready, we start displaying the preview.
              mCamera2CaptureSession = cameraCaptureSession;
              try {
                // Auto focus should be continuous for camera preview.
                mCamera2CaptureRequestBuilder.set(
                    CaptureRequest.CONTROL_AF_MODE,
                    CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
                // Flash is automatically enabled when necessary.
                setAutoFlash(mCamera2CaptureRequestBuilder);

                // Finally, we start displaying the camera preview.
                mCamera2CaptureRequest = mCamera2CaptureRequestBuilder.build();
                mCamera2CaptureSession.setRepeatingRequest(
                    mCamera2CaptureRequest, mCapture2Callback, mBackgroundHandler);
              } catch (CameraAccessException e) {
                e.printStackTrace();
              }
            }

            @Override
            public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
              Etils.showToast(CameraActivity.this, "Failed");
            }
          },
          null);
    } catch (CameraAccessException e) {
      e.printStackTrace();
    }
  }
 /**
  * Creates the external GL texture plus its SurfaceTexture, wires the
  * frame-available callback to this object, and publishes the texture-ready flag.
  */
 public void acquireSurfaceTexture() {
   mExtTexture = new ExtTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
   mExtTexture.setSize(mWidth, mHeight);
   final SurfaceTexture st = new SurfaceTexture(mExtTexture.getId());
   mSurfaceTexture = st;
   st.setDefaultBufferSize(mWidth, mHeight);
   st.setOnFrameAvailableListener(this);
   // Publish the "texture exists" flag under the lock so readers observe it safely.
   synchronized (this) {
     mHasTexture = true;
   }
 }
    /**
     * Renders the latest video frame: latches the newest SurfaceTexture image (if
     * one arrived), then draws a full-screen textured quad from the external-OES
     * texture. Returns without touching the framebuffer when no new frame exists.
     */
    public void onDrawFrame(GL10 glUnused) {

      synchronized (this) {
        if (updateSurface) {
          // Latch the newest frame and its texture transform while holding the lock.
          mSurface.updateTexImage();
          mSurface.getTransformMatrix(mSTMatrix);
          updateSurface = false;
        } else {
          // Nothing new to draw.
          return;
        }
      }

      // FIX: glClearColor clamps each component to [0, 1]; the original 255.0f was
      // silently clamped to 1.0f, so state the intended value (opaque white) directly.
      GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
      GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

      GLES20.glUseProgram(mProgram);
      checkGlError("glUseProgram");

      GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
      GLES20.glBindTexture(GL_TEXTURE_EXTERNAL_OES, mTextureID);

      // Position attribute: 3 floats per vertex.
      mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
      GLES20.glVertexAttribPointer(
          maPositionHandle,
          3,
          GLES20.GL_FLOAT,
          false,
          TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
          mTriangleVertices);
      checkGlError("glVertexAttribPointer maPosition");
      GLES20.glEnableVertexAttribArray(maPositionHandle);
      checkGlError("glEnableVertexAttribArray maPositionHandle");

      // Texture-coordinate attribute: 2 floats per vertex.
      mTextureVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
      GLES20.glVertexAttribPointer(
          maTextureHandle,
          2,
          GLES20.GL_FLOAT,
          false,
          TEXTURE_VERTICES_DATA_STRIDE_BYTES,
          mTextureVertices);

      checkGlError("glVertexAttribPointer maTextureHandle");
      GLES20.glEnableVertexAttribArray(maTextureHandle);
      checkGlError("glEnableVertexAttribArray maTextureHandle");

      Matrix.setIdentityM(mMVPMatrix, 0);

      GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
      GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

      GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
      checkGlError("glDrawArrays");
      GLES20.glFinish();
    }
예제 #8
0
  /**
   * Captures a screenshot of the main built-in display into our GL texture and
   * configures the viewport and fixed-function matrices so the texture can be
   * drawn 1:1 onto the display.
   *
   * <p>NOTE(review): uses hidden {@code SurfaceControl} platform APIs — this only
   * works from privileged/system code.
   *
   * @return true on success; false if the EGL context could not be attached or
   *     texture generation failed
   */
  private boolean captureScreenshotTextureAndSetViewport() {
    if (!attachEglContext()) {
      return false;
    }
    try {
      // Lazily create the texture name the screenshot is latched into.
      if (!mTexNamesGenerated) {
        GLES10.glGenTextures(1, mTexNames, 0);
        if (checkGlErrors("glGenTextures")) {
          return false;
        }
        mTexNamesGenerated = true;
      }

      // Route the screenshot through a Surface backed by our SurfaceTexture,
      // releasing the Surface as soon as the producer side is done with it.
      final SurfaceTexture st = new SurfaceTexture(mTexNames[0]);
      final Surface s = new Surface(st);
      try {
        SurfaceControl.screenshot(
            SurfaceControl.getBuiltInDisplay(SurfaceControl.BUILT_IN_DISPLAY_ID_MAIN), s);
      } finally {
        s.release();
      }

      st.updateTexImage();
      st.getTransformMatrix(mTexMatrix);

      // Set up texture coordinates for a quad.
      // We might need to change this if the texture ends up being
      // a different size from the display for some reason.
      mTexCoordBuffer.put(0, 0f);
      mTexCoordBuffer.put(1, 0f);
      mTexCoordBuffer.put(2, 0f);
      mTexCoordBuffer.put(3, 1f);
      mTexCoordBuffer.put(4, 1f);
      mTexCoordBuffer.put(5, 1f);
      mTexCoordBuffer.put(6, 1f);
      mTexCoordBuffer.put(7, 0f);

      // Set up our viewport.
      GLES10.glViewport(0, 0, mDisplayWidth, mDisplayHeight);
      GLES10.glMatrixMode(GLES10.GL_PROJECTION);
      GLES10.glLoadIdentity();
      GLES10.glOrthof(0, mDisplayWidth, 0, mDisplayHeight, 0, 1);
      GLES10.glMatrixMode(GLES10.GL_MODELVIEW);
      GLES10.glLoadIdentity();
      // Load the SurfaceTexture's transform into the texture matrix so sampling
      // respects any crop/flip the producer applied.
      GLES10.glMatrixMode(GLES10.GL_TEXTURE);
      GLES10.glLoadIdentity();
      GLES10.glLoadMatrixf(mTexMatrix, 0);
    } finally {
      detachEglContext();
    }
    return true;
  }
 /**
  * Tears down whichever video stream was using the destroyed surface texture.
  *
  * @return true so the TextureView releases the underlying surface itself.
  */
 @Override
 public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
   if (surface.equals(mCameraPreview.getSurfaceTexture())) {
     if (DBG) {
       log("CameraPreview surface texture destroyed");
     }
     // Local preview is gone: stop capture and release the camera.
     stopRecordingAndPreview();
     closeCamera();
     mCameraSurface = null;
   } else if (surface.equals(mFarEndView.getSurfaceTexture())) {
     if (DBG) {
       log("FarEndView surface texture destroyed");
     }
     // Remote view is gone: detach it from the video call.
     mFarEndSurface = null;
     mVideoCallManager.setFarEndSurface(null);
   }
   return true;
 }
 /**
  * Routes a newly available surface texture to the camera preview or the far-end
  * video view, depending on which view it belongs to.
  */
 @Override
 public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
   if (surface.equals(mCameraPreview.getSurfaceTexture())) {
     if (DBG) {
       log("Camera surface texture created");
     }
     mCameraSurface = surface;
     // Initialize the camera only if it is not already set up.
     if (isCameraInitNeeded()) {
       initializeCamera();
     }
   } else if (surface.equals(mFarEndView.getSurfaceTexture())) {
     if (DBG) {
       log("Video surface texture created");
     }
     mFarEndSurface = surface;
     mVideoCallManager.setFarEndSurface(mFarEndSurface);
   }
 }
예제 #11
0
  /**
   * Starts or stops frame-available updates when the view's visibility changes —
   * an invisible view should not spend CPU processing frames.
   */
  @Override
  protected void onVisibilityChanged(View changedView, int visibility) {
    super.onVisibilityChanged(changedView, visibility);

    if (mSurface == null) {
      return;
    }
    if (visibility == VISIBLE) {
      // Resume updates and refresh the layer right away.
      mSurface.setOnFrameAvailableListener(mUpdateListener);
      updateLayerAndInvalidate();
    } else {
      // Removing the listener is the simplest way to cancel updates.
      mSurface.setOnFrameAvailableListener(null);
    }
  }
예제 #12
0
 /** Shuts the source down: notifies the listener, then releases the SurfaceTexture. */
 @Override
 public void close(FilterContext context) {
   if (mLogVerbose) {
     Log.v(TAG, "SurfaceTextureSource closed");
   }
   // Tell consumers first so nobody keeps using the texture we are about to free.
   mSourceListener.onSurfaceTextureSourceReady(null);
   mSurfaceTexture.release();
   mSurfaceTexture = null;
 }
예제 #13
0
 /** Latches the newest camera frame into the GL texture when one has arrived. */
 public void update() {
   if (!textureUpdated) {
     return;
   }
   // Clear the flag before latching so a frame arriving mid-call is not lost.
   textureUpdated = false;
   surfaceTexture.updateTexImage();
 }
예제 #14
0
  /**
   * Latches the next buffer into the texture. Must be called from the thread that created the
   * OutputSurface object.
   *
   * <p>Waits against an absolute deadline so a spurious wakeup simply re-enters the
   * wait with the remaining time instead of being misreported as a timeout (this
   * resolves the old TODO on spurious wakeups).
   *
   * @throws RuntimeException if no frame arrives within the timeout, or if the wait
   *     is interrupted (the thread's interrupt status is restored first)
   */
  public void awaitNewImage() {
    final long TIMEOUT_MS = 2500;

    synchronized (frameSyncObject) {
      final long deadline = System.currentTimeMillis() + TIMEOUT_MS;
      while (!frameAvailable) {
        final long remaining = deadline - System.currentTimeMillis();
        if (remaining <= 0) {
          // Genuine timeout: the deadline passed without a frame arriving.
          throw new RuntimeException("Camera frame wait timed out");
        }
        try {
          // Wait for onFrameAvailable() to signal us, bounded by the time left.
          frameSyncObject.wait(remaining);
        } catch (InterruptedException ie) {
          // Shouldn't happen; preserve interrupt status for callers up the stack.
          Thread.currentThread().interrupt();
          throw new RuntimeException(ie);
        }
      }
      frameAvailable = false;
    }

    // Latch the data.
    textureRender.checkGlError("before updateTexImage");
    surfaceTexture.updateTexImage();
  }
예제 #15
0
  /**
   * Filter entry point: optionally blocks until a new SurfaceTexture frame arrives,
   * latches it, remaps the source coordinates through the texture transform, renders
   * the frame into a freshly allocated output Frame, and pushes it on the "video"
   * port with the SurfaceTexture's timestamp.
   *
   * @param context filter context used to allocate the output frame
   * @throws RuntimeException if a frame does not arrive within {@code mWaitTimeout}
   *     and {@code mCloseOnTimeout} is false
   */
  @Override
  public void process(FilterContext context) {
    if (mLogVerbose) Log.v(TAG, "Processing new frame");

    // First, get new frame if available
    if (mWaitForNewFrame || mFirstFrame) {
      boolean gotNewFrame;
      if (mWaitTimeout != 0) {
        gotNewFrame = mNewFrameAvailable.block(mWaitTimeout);
        if (!gotNewFrame) {
          if (!mCloseOnTimeout) {
            throw new RuntimeException("Timeout waiting for new frame");
          } else {
            // Configured to treat a timeout as end-of-stream rather than an error.
            if (mLogVerbose) Log.v(TAG, "Timeout waiting for a new frame. Closing.");
            closeOutputPort("video");
            return;
          }
        }
      } else {
        // No timeout configured: block indefinitely.
        mNewFrameAvailable.block();
      }
      mNewFrameAvailable.close();
      mFirstFrame = false;
    }

    mSurfaceTexture.updateTexImage();

    // Map the source quad through the SurfaceTexture's 4x4 transform so the
    // extractor samples the correct (possibly cropped/flipped) region.
    mSurfaceTexture.getTransformMatrix(mFrameTransform);
    Matrix.multiplyMM(
        mMappedCoords, 0,
        mFrameTransform, 0,
        mSourceCoords, 0);
    // The four (x, y) corner pairs live at stride-4 offsets in the mapped vector.
    mFrameExtractor.setSourceRegion(
        mMappedCoords[0], mMappedCoords[1],
        mMappedCoords[4], mMappedCoords[5],
        mMappedCoords[8], mMappedCoords[9],
        mMappedCoords[12], mMappedCoords[13]);
    // Next, render to output
    Frame output = context.getFrameManager().newFrame(mOutputFormat);
    mFrameExtractor.process(mMediaFrame, output);

    output.setTimestamp(mSurfaceTexture.getTimestamp());

    pushOutput("video", output);
    output.release();
  }
예제 #16
0
 /**
  * Copies the 4x4 texture-coordinate transform associated with the most recent
  * {@code updateTexImage} call into the supplied array.
  *
  * @param matrix destination array; must have exactly 16 elements
  * @return true if the matrix was written, false when no SurfaceTexture is attached
  */
 public boolean getTransformMatrix(float[] matrix) {
   if (mSurfaceTexture == null) {
     return false;
   }
   mSurfaceTexture.getTransformMatrix(matrix);
   return true;
 }
예제 #17
0
 /** Stops playback, clears the new-frame flag, and releases the SurfaceTexture. */
 public void destroy() {
   stopPlayback();
   mIsNewFrameArrival.set(false);
   final SurfaceTexture texture = mSurfaceTexture;
   if (texture != null) {
     texture.release();
     mSurfaceTexture = null;
   }
 }
 /** Connects the SurfaceTexture to the Camera preview output, and starts the preview. */
 private void handleSetSurfaceTexture(SurfaceTexture st) {
   // Register for frame callbacks before wiring the camera so no frame is missed.
   st.setOnFrameAvailableListener(this);
   try {
     mCamera.setPreviewTexture(st);
   } catch (IOException ioe) {
     // An unusable preview texture is unrecoverable here; surface it as fatal.
     throw new RuntimeException(ioe);
   }
   mCamera.startPreview();
 }
 /**
  * Opens a capture session that renders into an offscreen SurfaceTexture sized to
  * the camera's smallest supported output.
  *
  * <p>NOTE(review): {@code new SurfaceTexture(false)} uses a non-public boolean
  * constructor — confirm the intended overload on the target platform.
  */
 private void startSession() throws CameraAccessException {
   mSurfaceTexture = new SurfaceTexture(false);
   final Size smallest = getSmallestSize(mCameraDevice.getId());
   mSurfaceTexture.setDefaultBufferSize(smallest.getWidth(), smallest.getHeight());
   mSurface = new Surface(mSurfaceTexture);
   final ArrayList<Surface> outputs = new ArrayList<>(1);
   outputs.add(mSurface);
   mCameraDevice.createCaptureSession(outputs, mTorchSessionListener, mHandler);
 }
예제 #20
0
  /**
   * (Re)creates the SurfaceTexture for the given GL texture name, hooks the
   * frame-available flag to it, and kicks off video preparation and playback.
   */
  public void initialize(int textureName) {
    // Drop any previous texture before creating a new one.
    if (mSurfaceTexture != null) {
      mSurfaceTexture.release();
      mSurfaceTexture = null;
    }

    final SurfaceTexture texture = new SurfaceTexture(textureName);
    texture.setOnFrameAvailableListener(
        new SurfaceTexture.OnFrameAvailableListener() {
          @Override
          public void onFrameAvailable(SurfaceTexture surfaceTexture) {
            // CAS avoids redundant writes when frames arrive back-to-back.
            mIsNewFrameArrival.compareAndSet(false, true);
          }
        });
    mSurfaceTexture = texture;

    mSurface = new Surface(mSurfaceTexture);
    prepareVideo();
    start();
  }
  /**
   * Draws the latest video frame into the given canvas rectangle, flipped
   * vertically about the rectangle's center to match the texture orientation.
   */
  @Override
  public void draw(GLCanvas canvas, int x, int y, int width, int height) {
    synchronized (this) {
      if (!mHasTexture) {
        return;
      }
      mSurfaceTexture.updateTexImage();
      mSurfaceTexture.getTransformMatrix(mTransform);

      // Flip vertically around the rectangle center.
      canvas.save(GLCanvas.SAVE_FLAG_MATRIX);
      final int centerX = x + width / 2;
      final int centerY = y + height / 2;
      canvas.translate(centerX, centerY);
      canvas.scale(1, -1, 1);
      canvas.translate(-centerX, -centerY);
      canvas.drawTexture(mExtTexture, mTransform, x, y, width, height);
      canvas.restore();
    }
  }
 /** Marks the texture unusable, then frees the GL texture and the SurfaceTexture. */
 public void releaseSurfaceTexture() {
   // Drop the flag under the lock first so draw() stops touching the texture
   // before it is freed below.
   synchronized (this) {
     mHasTexture = false;
   }
   mExtTexture.recycle();
   mExtTexture = null;
   mSurfaceTexture.release();
   mSurfaceTexture = null;
 }
예제 #23
0
  /** Creates instances of CameraTextureRender and SurfaceTexture. */
  public SurfaceTextureManager() {
    textureRender = new CameraTextureRender();
    textureRender.surfaceCreated();

    logger.debug("textureID=" + textureRender.getTextureId());
    // Bind the SurfaceTexture to the texture id the renderer just created, and
    // deliver frame-available callbacks to this manager.
    surfaceTexture = new SurfaceTexture(textureRender.getTextureId());
    surfaceTexture.setOnFrameAvailableListener(this);
  }
예제 #24
0
  /**
   * Per-frame setup: animates the cube model, rebuilds the camera matrix, records
   * the head transform, and latches the newest video frame into the external texture.
   *
   * <p>FIX: the original called {@code headTransform.getHeadView(headView, 0)} twice
   * with identical arguments; the redundant second call has been removed.
   *
   * @param headTransform The head transformation in the new frame.
   */
  @Override
  public void onNewFrame(HeadTransform headTransform) {
    // Build the Model part of the ModelView matrix.
    Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);

    // Build the camera matrix and apply it to the ModelView.
    Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);

    headTransform.getHeadView(headView, 0);

    checkGLError("onReadyToDraw");

    // Latch the newest video frame; the transform lands in a scratch matrix that
    // is not consumed here.
    float[] mtx = new float[16];
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    surface.updateTexImage();
    surface.getTransformMatrix(mtx);
    mHeadTransform = headTransform;
  }
예제 #25
0
 /**
  * Draws the current frame through the filter chain, then latches any pending
  * SurfaceTexture image.
  *
  * <p>NOTE(review): updateTexImage runs after drawing, so the displayed frame lags
  * the newest image by one draw — confirm this ordering is intentional.
  */
 @Override
 public void onDrawFrame(final GL10 gl) {
   GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
   runAll(mRunOnDraw);
   mFilter.onDraw(mGLTextureId, mGLCubeBuffer, mGLTextureBuffer);
   runAll(mRunOnDrawEnd);
   final SurfaceTexture texture = mSurfaceTexture;
   if (texture != null) {
     texture.updateTexImage();
   }
 }
예제 #26
0
 /** Frees the GL texture and releases the SurfaceTexture; safe to call repeatedly. */
 public void release() {
   if (textureName > 0) {
     // BUG FIX: the first argument of glDeleteTextures is the COUNT of textures to
     // delete. The original passed 0, which deleted nothing and leaked the texture.
     GLES20.glDeleteTextures(1, new int[] {textureName}, 0);
     textureName = -1;
   }
   if (surfaceTexture != null) {
     surfaceTexture.release();
     surfaceTexture = null;
   }
 }
  /**
   * Creates the preview SurfaceTexture for the given GL texture name, hands it to
   * the renderer, opens the default camera, and starts streaming preview frames.
   *
   * <p>FIX: {@code Camera.setPreviewTexture} declares a checked {@code IOException};
   * the original let it propagate from a method with no {@code throws} clause, which
   * does not compile. It is now caught and rethrown with context.
   *
   * @param texture GL texture name the preview frames are rendered into
   */
  public void startCamera(int texture) {
    surface = new SurfaceTexture(texture);
    surface.setOnFrameAvailableListener(this);
    renderer.setSurface(surface);

    mCamera = Camera.open();

    try {
      mCamera.setPreviewTexture(surface);
    } catch (IOException ioe) {
      throw new RuntimeException("Failed to attach camera preview texture", ioe);
    }
    mCamera.startPreview();
  }
예제 #28
0
 /**
  * Resizes the surface's default buffer to the view's new dimensions and notifies
  * the listener, if any.
  */
 @Override
 protected void onSizeChanged(int w, int h, int oldw, int oldh) {
   super.onSizeChanged(w, h, oldw, oldh);
   if (mSurface == null) {
     return;
   }
   final int newWidth = getWidth();
   final int newHeight = getHeight();
   mSurface.setDefaultBufferSize(newWidth, newHeight);
   updateLayer();
   if (mListener != null) {
     mListener.onSurfaceTextureSizeChanged(mSurface, newWidth, newHeight);
   }
 }
예제 #29
0
 /** Releases the texture renderer and the SurfaceTexture; safe to call repeatedly. */
 @TargetApi(Build.VERSION_CODES.JELLY_BEAN_MR2)
 public void release() {
   if (textureRender != null) {
     textureRender.release();
     textureRender = null;
   }
   final SurfaceTexture texture = surfaceTexture;
   if (texture != null) {
     texture.release();
     surfaceTexture = null;
   }
 }
예제 #30
0
 /**
  * Sets the {@link SurfaceTexture} this view renders into. A {@link SurfaceTexture}
  * already in use by the view is released immediately and becomes unusable; note that
  * neither {@link SurfaceTextureListener#onSurfaceTextureDestroyed} (for the old
  * texture) nor {@link SurfaceTextureListener#onSurfaceTextureAvailable} (for the new
  * one) is invoked by this method.
  *
  * <p>The {@link SurfaceTexture} object must be detached from all OpenGL ES contexts
  * before being passed here.
  *
  * @param surfaceTexture The {@link SurfaceTexture} that the view should use.
  * @see SurfaceTexture#detachFromGLContext()
  */
 public void setSurfaceTexture(SurfaceTexture surfaceTexture) {
   if (surfaceTexture == null) {
     throw new NullPointerException("surfaceTexture must not be null");
   }
   // Release the old texture before swapping in the new one.
   if (mSurface != null) {
     mSurface.release();
   }
   mSurface = surfaceTexture;
   mUpdateSurface = true;
   invalidateParentIfNeeded();
 }