public void update() {
  if (textureUpdated) {
    textureUpdated = false;
    surfaceTexture.updateTexImage();
  }
}
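// A minimal sketch of the producer side update() assumes; the registration
// itself is an assumption, and the field names follow the snippet above. The
// callback can arrive on an arbitrary thread, so it only raises the flag;
// updateTexImage() stays on the thread that owns the GL context.
surfaceTexture.setOnFrameAvailableListener(
    new SurfaceTexture.OnFrameAvailableListener() {
      @Override
      public void onFrameAvailable(SurfaceTexture st) {
        textureUpdated = true;
      }
    });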
  /**
   * Latches the next buffer into the texture. Must be called from the thread that created the
   * OutputSurface object.
   */
  public void awaitNewImage() {
    final int TIMEOUT_MS = 2500;

    synchronized (frameSyncObject) {
      final long deadline = System.currentTimeMillis() + TIMEOUT_MS;
      while (!frameAvailable) {
        // Wait for onFrameAvailable() to signal us. Tracking a deadline means
        // a spurious wakeup simply re-enters the loop with the remaining
        // time, and the test still cannot stall if the frame never arrives.
        long remainingMs = deadline - System.currentTimeMillis();
        if (remainingMs <= 0) {
          throw new RuntimeException("Camera frame wait timed out");
        }
        try {
          frameSyncObject.wait(remainingMs);
        } catch (InterruptedException ie) {
          // Restore the interrupt status and surface the failure.
          Thread.currentThread().interrupt();
          throw new RuntimeException(ie);
        }
      }
      frameAvailable = false;
    }

    // Latch the data.
    textureRender.checkGlError("before updateTexImage");
    surfaceTexture.updateTexImage();
  }
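  // The matching signal side, as a hedged sketch: in helpers of this style the
  // frame callback only sets the flag and wakes the waiter; the original class
  // may differ in detail.
  @Override
  public void onFrameAvailable(SurfaceTexture st) {
    synchronized (frameSyncObject) {
      if (frameAvailable) {
        throw new RuntimeException("frameAvailable already set, frame could be dropped");
      }
      frameAvailable = true;
      frameSyncObject.notifyAll();
    }
  }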
  /**
   * Prepares OpenGL ES before we draw a frame.
   *
   * @param headTransform The head transformation in the new frame.
   */
  @Override
  public void onNewFrame(HeadTransform headTransform) {
    // Latch the latest camera frame; the transform matrix is fetched but not
    // used by this sample.
    float[] mtx = new float[16];
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    surface.updateTexImage();
    surface.getTransformMatrix(mtx);
  }
  @Override
  public void onDrawFrame(final GL10 gl) {
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    runAll(mRunOnDraw);
    mFilter.onDraw(mGLTextureId, mGLCubeBuffer, mGLTextureBuffer);
    runAll(mRunOnDrawEnd);
    if (mSurfaceTexture != null) {
      // Latch the next camera frame after drawing; it is consumed on the
      // following render pass.
      mSurfaceTexture.updateTexImage();
    }
  }
    public void onDrawFrame(GL10 glUnused) {

      synchronized (this) {
        if (updateSurface) {
          mSurface.updateTexImage();
          mSurface.getTransformMatrix(mSTMatrix);
          updateSurface = false;
        } else {
          return;
        }
      }

      // Clear to opaque white; color components are clamped to [0, 1].
      GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
      GLES20.glClear(GLES20.GL_DEPTH_BUFFER_BIT | GLES20.GL_COLOR_BUFFER_BIT);

      GLES20.glUseProgram(mProgram);
      checkGlError("glUseProgram");

      GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
      GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);

      mTriangleVertices.position(TRIANGLE_VERTICES_DATA_POS_OFFSET);
      GLES20.glVertexAttribPointer(
          maPositionHandle,
          3,
          GLES20.GL_FLOAT,
          false,
          TRIANGLE_VERTICES_DATA_STRIDE_BYTES,
          mTriangleVertices);
      checkGlError("glVertexAttribPointer maPosition");
      GLES20.glEnableVertexAttribArray(maPositionHandle);
      checkGlError("glEnableVertexAttribArray maPositionHandle");

      mTextureVertices.position(TRIANGLE_VERTICES_DATA_UV_OFFSET);
      GLES20.glVertexAttribPointer(
          maTextureHandle,
          2,
          GLES20.GL_FLOAT,
          false,
          TEXTURE_VERTICES_DATA_STRIDE_BYTES,
          mTextureVertices);

      checkGlError("glVertexAttribPointer maTextureHandle");
      GLES20.glEnableVertexAttribArray(maTextureHandle);
      checkGlError("glEnableVertexAttribArray maTextureHandle");

      Matrix.setIdentityM(mMVPMatrix, 0);

      GLES20.glUniformMatrix4fv(muMVPMatrixHandle, 1, false, mMVPMatrix, 0);
      GLES20.glUniformMatrix4fv(muSTMatrixHandle, 1, false, mSTMatrix, 0);

      GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
      checkGlError("glDrawArrays");
      GLES20.glFinish();
    }
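    // Sketch of the setup this renderer assumes, in the style of a common
    // onSurfaceCreated(); details here are assumptions, and shader/program
    // creation is omitted: generate an OES texture, wrap it in the
    // SurfaceTexture, and flag arriving frames for onDrawFrame().
    public void onSurfaceCreated(GL10 glUnused, EGLConfig config) {
      int[] textures = new int[1];
      GLES20.glGenTextures(1, textures, 0);
      mTextureID = textures[0];
      GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTextureID);
      GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
          GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
      GLES20.glTexParameterf(GLES11Ext.GL_TEXTURE_EXTERNAL_OES,
          GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_LINEAR);

      mSurface = new SurfaceTexture(mTextureID);
      mSurface.setOnFrameAvailableListener(this);
    }

    public void onFrameAvailable(SurfaceTexture surfaceTexture) {
      synchronized (this) {
        updateSurface = true; // latched by onDrawFrame() above
      }
    }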
  private boolean captureScreenshotTextureAndSetViewport() {
    if (!attachEglContext()) {
      return false;
    }
    try {
      if (!mTexNamesGenerated) {
        GLES10.glGenTextures(1, mTexNames, 0);
        if (checkGlErrors("glGenTextures")) {
          return false;
        }
        mTexNamesGenerated = true;
      }

      final SurfaceTexture st = new SurfaceTexture(mTexNames[0]);
      final Surface s = new Surface(st);
      try {
        SurfaceControl.screenshot(
            SurfaceControl.getBuiltInDisplay(SurfaceControl.BUILT_IN_DISPLAY_ID_MAIN), s);
      } finally {
        s.release();
      }

      st.updateTexImage();
      st.getTransformMatrix(mTexMatrix);

      // Set up texture coordinates for a quad.
      // We might need to change this if the texture ends up being
      // a different size from the display for some reason.
      mTexCoordBuffer.put(0, 0f);
      mTexCoordBuffer.put(1, 0f);
      mTexCoordBuffer.put(2, 0f);
      mTexCoordBuffer.put(3, 1f);
      mTexCoordBuffer.put(4, 1f);
      mTexCoordBuffer.put(5, 1f);
      mTexCoordBuffer.put(6, 1f);
      mTexCoordBuffer.put(7, 0f);

      // Set up our viewport.
      GLES10.glViewport(0, 0, mDisplayWidth, mDisplayHeight);
      GLES10.glMatrixMode(GLES10.GL_PROJECTION);
      GLES10.glLoadIdentity();
      GLES10.glOrthof(0, mDisplayWidth, 0, mDisplayHeight, 0, 1);
      GLES10.glMatrixMode(GLES10.GL_MODELVIEW);
      GLES10.glLoadIdentity();
      GLES10.glMatrixMode(GLES10.GL_TEXTURE);
      GLES10.glLoadIdentity();
      GLES10.glLoadMatrixf(mTexMatrix, 0);
    } finally {
      detachEglContext();
    }
    return true;
  }
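  // Hypothetical companion draw (mVertexBuffer and the method name are
  // assumptions): with the viewport and matrices prepared above, bind the
  // screenshot texture and draw a full-screen quad through the GLES 1.x
  // fixed-function pipeline.
  private void drawScreenshotQuad() {
    GLES10.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, mTexNames[0]);
    GLES10.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    GLES10.glEnableClientState(GLES10.GL_VERTEX_ARRAY);
    GLES10.glEnableClientState(GLES10.GL_TEXTURE_COORD_ARRAY);
    GLES10.glVertexPointer(2, GLES10.GL_FLOAT, 0, mVertexBuffer);
    GLES10.glTexCoordPointer(2, GLES10.GL_FLOAT, 0, mTexCoordBuffer);
    GLES10.glDrawArrays(GLES10.GL_TRIANGLE_FAN, 0, 4);
    GLES10.glDisableClientState(GLES10.GL_TEXTURE_COORD_ARRAY);
    GLES10.glDisableClientState(GLES10.GL_VERTEX_ARRAY);
    GLES10.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
  }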
  // Must be called from the GL thread. Updates the texture image if a new
  // frame has arrived from the video source.
  public boolean applyUpdate() {
    if (mSurfaceTexture == null) {
      return false;
    }
    if (!mIsNewFrameArrival.get()) {
      return false;
    }

    mIsNewFrameArrival.set(false);
    mSurfaceTexture.updateTexImage();
    onSurfaceTextureUpdated(mSurfaceTexture);
    if (isFirstFrameAndPause()) {
      pause();
      return true;
    }
    return false;
  }
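  // Sketch of the producer side applyUpdate() relies on; the helper name and
  // registration point are assumptions. The callback only flips the atomic
  // flag, keeping updateTexImage() on the GL thread.
  private void registerFrameListener() {
    mSurfaceTexture.setOnFrameAvailableListener(
        new SurfaceTexture.OnFrameAvailableListener() {
          @Override
          public void onFrameAvailable(SurfaceTexture st) {
            mIsNewFrameArrival.set(true);
          }
        });
  }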
  @Override
  public void process(FilterContext context) {
    if (mLogVerbose) Log.v(TAG, "Processing new frame");

    // First, get new frame if available
    if (mWaitForNewFrame || mFirstFrame) {
      boolean gotNewFrame;
      if (mWaitTimeout != 0) {
        gotNewFrame = mNewFrameAvailable.block(mWaitTimeout);
        if (!gotNewFrame) {
          if (!mCloseOnTimeout) {
            throw new RuntimeException("Timeout waiting for new frame");
          } else {
            if (mLogVerbose) Log.v(TAG, "Timeout waiting for a new frame. Closing.");
            closeOutputPort("video");
            return;
          }
        }
      } else {
        mNewFrameAvailable.block();
      }
      mNewFrameAvailable.close();
      mFirstFrame = false;
    }

    mSurfaceTexture.updateTexImage();

    mSurfaceTexture.getTransformMatrix(mFrameTransform);
    Matrix.multiplyMM(
        mMappedCoords, 0,
        mFrameTransform, 0,
        mSourceCoords, 0);
    mFrameExtractor.setSourceRegion(
        mMappedCoords[0], mMappedCoords[1],
        mMappedCoords[4], mMappedCoords[5],
        mMappedCoords[8], mMappedCoords[9],
        mMappedCoords[12], mMappedCoords[13]);
    // Next, render to output
    Frame output = context.getFrameManager().newFrame(mOutputFormat);
    mFrameExtractor.process(mMediaFrame, output);

    output.setTimestamp(mSurfaceTexture.getTimestamp());

    pushOutput("video", output);
    output.release();
  }
  @Override
  public void draw(GLCanvas canvas, int x, int y, int width, int height) {
    synchronized (this) {
      if (!mHasTexture) return;
      mSurfaceTexture.updateTexImage();
      mSurfaceTexture.getTransformMatrix(mTransform);

      // Flip vertically.
      canvas.save(GLCanvas.SAVE_FLAG_MATRIX);
      int cx = x + width / 2;
      int cy = y + height / 2;
      canvas.translate(cx, cy);
      canvas.scale(1, -1, 1);
      canvas.translate(-cx, -cy);
      canvas.drawTexture(mExtTexture, mTransform, x, y, width, height);
      canvas.restore();
    }
  }
  @Override
  public void onNewFrame(HeadTransform headTransform) {
    // Build the Model part of the ModelView matrix.
    Matrix.rotateM(mModelCube, 0, TIME_DELTA, 0.5f, 0.5f, 1.0f);

    // Build the camera matrix and apply it to the ModelView.
    Matrix.setLookAtM(mCamera, 0, 0.0f, 0.0f, CAMERA_Z, 0.0f, 0.0f, 0.0f, 0.0f, 1.0f, 0.0f);

    mHeadTransform = headTransform;
    headTransform.getHeadView(headView, 0);

    checkGLError("onReadyToDraw");

    // Latch the latest camera frame; the transform matrix is fetched but not
    // used by this sample.
    float[] mtx = new float[16];
    GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
    surface.updateTexImage();
    surface.getTransformMatrix(mtx);
  }
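  // The matrix from getTransformMatrix() is meant to be applied to texture
  // coordinates before sampling the external texture. A minimal sketch of
  // typical shaders for this; the uniform and attribute names are assumptions
  // consistent with the muMVPMatrixHandle/muSTMatrixHandle lookups in the
  // GLES20 renderer earlier.
  private static final String VERTEX_SHADER =
      "uniform mat4 uMVPMatrix;\n"
          + "uniform mat4 uSTMatrix;\n"
          + "attribute vec4 aPosition;\n"
          + "attribute vec4 aTextureCoord;\n"
          + "varying vec2 vTextureCoord;\n"
          + "void main() {\n"
          + "  gl_Position = uMVPMatrix * aPosition;\n"
          + "  vTextureCoord = (uSTMatrix * aTextureCoord).xy;\n"
          + "}\n";

  private static final String FRAGMENT_SHADER =
      "#extension GL_OES_EGL_image_external : require\n"
          + "precision mediump float;\n"
          + "varying vec2 vTextureCoord;\n"
          + "uniform samplerExternalOES sTexture;\n"
          + "void main() {\n"
          + "  gl_FragColor = texture2D(sTexture, vTextureCoord);\n"
          + "}\n";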
  /** Tells the VideoPlayerHelper to update the data from the video feed */
  @SuppressLint("NewApi")
  public byte updateVideoData() {
    if (!isPlayableOnTexture()) {
      // DebugLog.LOGD("Cannot update the data of this video since it is not on texture");
      return -1;
    }

    byte result = -1;

    mSurfaceTextureLock.lock();
    if (mSurfaceTexture != null) {
      // Only request an update if currently playing
      if (mCurrentState == MEDIA_STATE.PLAYING) mSurfaceTexture.updateTexImage();

      result = mTextureID;
    }
    mSurfaceTextureLock.unlock();

    return result;
  }
  @Override
  public void onDrawFrame(GL10 gl) {
    // draws the skybox
    if (mUseSkybox) super.onDrawFrame(gl);

    // refresh camera texture
    if (mCameraFrameAvaible) {
      mCameraSurfaceTex.updateTexImage();
      mCameraFrameAvaible = false;
    }
    mCameraSurfaceTex.setOnFrameAvailableListener(this);
    // draw camera surface
    gl.glEnable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
    mCameraSurface.draw(gl, mViewMatrix);
    gl.glDisable(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);

    // draw the viewFinder
    mViewFinder.draw(gl, mViewMatrix);

    // Decide whether memory needs to be freed before handling snapshots.
    Runtime info = Runtime.getRuntime();
    long freeMem = info.freeMemory() / 1048576L; // MiB
    mHasToFreeMemory = freeMem < MEMORY_CLEANUP_THRESHOLD;

    // Process the snapshots that are in the field of view.
    mSnapshotsLock.lock();
    for (Snapshot3D snap : mSnapshots) {
      float distance = this.getSnapshotDisnance(snap);

      if (mHasToFreeMemory && distance > AUTO_UNLOADTEXTURE_ANGLE) {
        snap.unloadGLTexture(gl);
      } else if (distance < AUTO_LOADTEXTURE_ANGLE) {
        snap.loadGLTexture(gl);
      }

      if (distance > 120.0f) snap.setVisible(false);
      else {
        snap.setVisible(true);
        snap.draw(gl, super.getRotationMatrix());
      }
    }
    mSnapshotsLock.unlock();

    // ... and then all markers with newly computed alpha
    float d;

    // draw markers
    if (mUseMarkers) {
      mTargetsLock.lock();
      for (Snapshot3D dot : mDots) {
        d = getSnapshotDisnance(dot);
        if (d > 60.0f) {
          dot.setVisible(false);
        } else {
          dot.setVisible(true);
          // Set alpha based on camera distance to the point
          d = d * mMarkersAttenuationFactor / 360.0f;
          d = (d > 1.0f ? 1.0f : d);
          dot.setAlpha(1.0f - d);
          dot.draw(gl, super.getRotationMatrix());
        }
      }
      mTargetsLock.unlock();
    }
    if (mUseContours) {
      mTargetsLock.lock();
      for (Snapshot3D contour : mContours) {
        d = getSnapshotDisnance(contour);

        if (d > 60.0f) {
          contour.setVisible(false);
        } else {
          contour.setVisible(true);
          // Set alpha based on camera distance to the point
          d = d * mMarkersAttenuationFactor / 360.0f;
          d = (d > 1.0f ? 1.0f : d);
          contour.setAlpha(1.0f - d);
          contour.draw(gl, super.getRotationMatrix());
        }
      }
      mTargetsLock.unlock();
    }
  }
  @Override
  public void onDrawFrame(GL10 unused) {
    if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureId);
    boolean showBox = false;

    // Latch the latest frame.  If there isn't anything new, we'll just re-use whatever
    // was there before.
    mSurfaceTexture.updateTexImage();

    // If the recording state is changing, take care of it here.  Ideally we wouldn't
    // be doing all this in onDrawFrame(), but the EGLContext sharing with GLSurfaceView
    // makes it hard to do elsewhere.
    if (mRecordingEnabled) {
      switch (mRecordingStatus) {
        case RECORDING_OFF:
          Log.d(TAG, "START recording");
          // start recording
          mVideoEncoder.startRecording(
              new TextureMovieEncoder.EncoderConfig(
                  mOutputFile, 480, 480, 1000000, EGL14.eglGetCurrentContext()));
          // TODO: get optimal width and height according to specified devices
          mRecordingStatus = RECORDING_ON;
          break;
        case RECORDING_RESUMED:
          Log.d(TAG, "RESUME recording");
          mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
          mRecordingStatus = RECORDING_ON;
          break;
        case RECORDING_ON:
          // yay
          break;
        default:
          throw new RuntimeException("unknown status " + mRecordingStatus);
      }
    } else {
      switch (mRecordingStatus) {
        case RECORDING_ON:
        case RECORDING_RESUMED:
          // stop recording
          Log.d(TAG, "STOP recording");
          mVideoEncoder.stopRecording();
          mRecordingStatus = RECORDING_OFF;
          break;
        case RECORDING_OFF:
          // yay
          break;
        default:
          throw new RuntimeException("unknown status " + mRecordingStatus);
      }
    }

    // Set the video encoder's texture name.  We only need to do this once, but in the
    // current implementation it has to happen after the video encoder is started, so
    // we just do it here.
    //
    // TODO: be less lame.
    mVideoEncoder.setTextureId(mTextureId);

    // Tell the video encoder thread that a new frame is available.
    // This will be ignored if we're not actually recording.
    mVideoEncoder.frameAvailable(mSurfaceTexture);

    if (mIncomingWidth <= 0 || mIncomingHeight <= 0) {
      // Texture size isn't set yet.  This is only used for the filters, but to be
      // safe we can just skip drawing while we wait for the various races to resolve.
      // (This seems to happen if you toggle the screen off/on with power button.)
      Log.i(TAG, "Drawing before incoming texture size set; skipping");
      return;
    }
    // Update the filter, if necessary.
    if (mCurrentFilter != mNewFilter) {
      updateFilter();
    }
    if (mIncomingSizeUpdated) {
      mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
      mIncomingSizeUpdated = false;
    }

    // Draw the video frame.
    mSurfaceTexture.getTransformMatrix(mSTMatrix);
    mFullScreen.drawFrame(mTextureId, mSTMatrix);

    // Draw a flashing box if we're recording.  This only appears on screen.
    showBox = (mRecordingStatus == RECORDING_ON);
    if (showBox && (++mFrameCount & 0x04) == 0) {
      drawBox();
    }
  }
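  // Grafika pairs this renderer with a camera frame callback that simply pokes
  // the GLSurfaceView; a sketch, with the mGLView field assumed from the
  // enclosing activity. The resulting render pass latches the frame via
  // updateTexImage() at the top of onDrawFrame().
  @Override
  public void onFrameAvailable(SurfaceTexture st) {
    mGLView.requestRender();
  }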
    private void draw(GlRectDrawer drawer) {
      if (!seenFrame) {
        // No frame received yet - nothing to render.
        return;
      }
      long now = System.nanoTime();

      final boolean isNewFrame;
      synchronized (pendingFrameLock) {
        isNewFrame = (pendingFrame != null);
        if (isNewFrame && startTimeNs == -1) {
          startTimeNs = now;
        }

        if (isNewFrame) {
          if (pendingFrame.yuvFrame) {
            rendererType = RendererType.RENDERER_YUV;
            drawer.uploadYuvData(
                yuvTextures,
                pendingFrame.width,
                pendingFrame.height,
                pendingFrame.yuvStrides,
                pendingFrame.yuvPlanes);
            // The convention in WebRTC is that the first element in a ByteBuffer corresponds to the
            // top-left corner of the image, but in glTexImage2D() the first element corresponds to
            // the bottom-left corner. We correct this discrepancy by setting a vertical flip as
            // sampling matrix.
            final float[] samplingMatrix = RendererCommon.verticalFlipMatrix();
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);
          } else {
            rendererType = RendererType.RENDERER_TEXTURE;
            // External texture rendering. Update texture image to latest and make a deep copy of
            // the external texture.
            // TODO(magjed): Move updateTexImage() to the video source instead.
            final SurfaceTexture surfaceTexture = (SurfaceTexture) pendingFrame.textureObject;
            surfaceTexture.updateTexImage();
            final float[] samplingMatrix = new float[16];
            surfaceTexture.getTransformMatrix(samplingMatrix);
            rotatedSamplingMatrix =
                RendererCommon.rotateTextureMatrix(samplingMatrix, pendingFrame.rotationDegree);

            // Reallocate offscreen texture if necessary.
            textureCopy.setSize(pendingFrame.rotatedWidth(), pendingFrame.rotatedHeight());

            // Bind our offscreen framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, textureCopy.getFrameBufferId());
            GlUtil.checkNoGLES2Error("glBindFramebuffer");

            // Copy the OES texture content. This will also normalize the sampling matrix.
            GLES20.glViewport(0, 0, textureCopy.getWidth(), textureCopy.getHeight());
            drawer.drawOes(pendingFrame.textureId, rotatedSamplingMatrix);
            rotatedSamplingMatrix = RendererCommon.identityMatrix();

            // Restore normal framebuffer.
            GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
          }
          copyTimeNs += (System.nanoTime() - now);
          VideoRenderer.renderFrameDone(pendingFrame);
          pendingFrame = null;
        }
      }

      // OpenGL defaults to lower left origin - flip vertically.
      GLES20.glViewport(
          displayLayout.left,
          screenHeight - displayLayout.bottom,
          displayLayout.width(),
          displayLayout.height());

      updateLayoutMatrix();
      final float[] texMatrix =
          RendererCommon.multiplyMatrices(rotatedSamplingMatrix, layoutMatrix);
      if (rendererType == RendererType.RENDERER_YUV) {
        drawer.drawYuv(yuvTextures, texMatrix);
      } else {
        drawer.drawRgb(textureCopy.getTextureId(), texMatrix);
      }

      if (isNewFrame) {
        framesRendered++;
        drawTimeNs += (System.nanoTime() - now);
        if ((framesRendered % 300) == 0) {
          logStatistics();
        }
      }
    }