Code Example #1: onSurfaceCreated()
  @Override
  public void onSurfaceCreated(GL10 unused, EGLConfig config) {
    Log.d(TAG, "onSurfaceCreated");

    // We're starting up or coming back.  Either way we've got a new EGLContext that will
    // need to be shared with the video encoder, so figure out if a recording is already
    // in progress.
    mRecordingEnabled = mVideoEncoder.isRecording();
    if (mRecordingEnabled) {
      mRecordingStatus = RECORDING_RESUMED;
    } else {
      mRecordingStatus = RECORDING_OFF;
    }

    // Set up the texture blitter that will be used for on-screen display.  This
    // is *not* applied to the recording, because that uses a separate shader.
    mFullScreen = new FullFrameRect(new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT));

    mTextureId = mFullScreen.createTextureObject();

    // Create a SurfaceTexture, with an external texture, in this EGL context.  We don't
    // have a Looper in this thread -- GLSurfaceView doesn't create one -- so the frame
    // available messages will arrive on the main thread.
    mSurfaceTexture = new SurfaceTexture(mTextureId);

    // Tell the UI thread to enable the camera preview.
    mCameraHandler.sendMessage(
        mCameraHandler.obtainMessage(
            CameraCaptureActivity.CameraHandler.MSG_SET_SURFACE_TEXTURE, mSurfaceTexture));
  }
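The MSG_SET_SURFACE_TEXTURE message is handled back on the UI thread, which owns the Camera. That handler is not part of this excerpt; below is a minimal sketch of what it could look like, assuming a handleSetSurfaceTexture() helper on the activity that attaches the texture to the camera preview (the helper, the message value, and the WeakReference field are assumptions, not the article's code).

  // Sketch only: a static Handler with a weak Activity reference, so queued messages
  // don't leak the Activity.
  static class CameraHandler extends Handler {
    public static final int MSG_SET_SURFACE_TEXTURE = 0;  // value is an assumption

    private final WeakReference<CameraCaptureActivity> mWeakActivity;

    public CameraHandler(CameraCaptureActivity activity) {
      super(Looper.getMainLooper());  // deliver messages on the UI thread
      mWeakActivity = new WeakReference<>(activity);
    }

    @Override
    public void handleMessage(Message msg) {
      CameraCaptureActivity activity = mWeakActivity.get();
      if (activity == null) {
        return;  // Activity is gone; drop the message.
      }
      if (msg.what == MSG_SET_SURFACE_TEXTURE) {
        // Hypothetical helper: set the preview texture on the Camera and start the preview.
        activity.handleSetSurfaceTexture((SurfaceTexture) msg.obj);
      }
    }
  }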
Code Example #2: notifyPausing()
  /**
   * Notifies the renderer thread that the activity is pausing.
   *
   * <p>For best results, call this *after* disabling Camera preview.
   */
  public void notifyPausing() {
    if (mSurfaceTexture != null) {
      Log.d(TAG, "renderer pausing -- releasing SurfaceTexture");
      mSurfaceTexture.release();
      mSurfaceTexture = null;
    }
    if (mFullScreen != null) {
      // Assume the GLSurfaceView EGL context is about to be destroyed.
      mFullScreen.release(false);
      mFullScreen = null;
    }
    mIncomingWidth = mIncomingHeight = -1;
  }
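Because notifyPausing() releases GL resources, it must run on the renderer thread while the EGL context is still current. A sketch of how the Activity might drive it from onPause(), assuming mGLView is the GLSurfaceView, mRenderer is this renderer, and releaseCamera() is a helper that stops the preview first (all three names are assumptions):

  @Override
  protected void onPause() {
    super.onPause();
    releaseCamera();  // hypothetical helper: stop the Camera preview before pausing the renderer
    mGLView.queueEvent(new Runnable() {
      @Override
      public void run() {
        // Runs on the renderer thread; the EGL context is still current here.
        mRenderer.notifyPausing();
      }
    });
    mGLView.onPause();
  }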
Code Example #3: onDrawFrame()
  @Override
  public void onDrawFrame(GL10 unused) {
    if (VERBOSE) Log.d(TAG, "onDrawFrame tex=" + mTextureId);
    boolean showBox = false;

    // Latch the latest frame.  If there isn't anything new, we'll just re-use whatever
    // was there before.
    mSurfaceTexture.updateTexImage();

    // If the recording state is changing, take care of it here.  Ideally we wouldn't
    // be doing all this in onDrawFrame(), but the EGLContext sharing with GLSurfaceView
    // makes it hard to do elsewhere.
    if (mRecordingEnabled) {
      switch (mRecordingStatus) {
        case RECORDING_OFF:
          Log.d(TAG, "START recording");
          // start recording
          mVideoEncoder.startRecording(
              new TextureMovieEncoder.EncoderConfig(
                  mOutputFile, 480, 480, 1000000, EGL14.eglGetCurrentContext()));
          // TODO: choose an optimal width and height for the target device
          mRecordingStatus = RECORDING_ON;
          break;
        case RECORDING_RESUMED:
          Log.d(TAG, "RESUME recording");
          mVideoEncoder.updateSharedContext(EGL14.eglGetCurrentContext());
          mRecordingStatus = RECORDING_ON;
          break;
        case RECORDING_ON:
          // yay
          break;
        default:
          throw new RuntimeException("unknown status " + mRecordingStatus);
      }
    } else {
      switch (mRecordingStatus) {
        case RECORDING_ON:
        case RECORDING_RESUMED:
          // stop recording
          Log.d(TAG, "STOP recording");
          mVideoEncoder.stopRecording();
          mRecordingStatus = RECORDING_OFF;
          break;
        case RECORDING_OFF:
          // yay
          break;
        default:
          throw new RuntimeException("unknown status " + mRecordingStatus);
      }
    }

    // Set the video encoder's texture name.  We only need to do this once, but in the
    // current implementation it has to happen after the video encoder is started, so
    // we just do it here.
    //
    // TODO: be less lame.
    mVideoEncoder.setTextureId(mTextureId);

    // Tell the video encoder thread that a new frame is available.
    // This will be ignored if we're not actually recording.
    mVideoEncoder.frameAvailable(mSurfaceTexture);

    if (mIncomingWidth <= 0 || mIncomingHeight <= 0) {
      // Texture size isn't set yet.  This is only used for the filters, but to be
      // safe we can just skip drawing while we wait for the various races to resolve.
      // (This seems to happen if you toggle the screen off/on with power button.)
      Log.i(TAG, "Drawing before incoming texture size set; skipping");
      return;
    }
    // Update the filter, if necessary.
    if (mCurrentFilter != mNewFilter) {
      updateFilter();
    }
    if (mIncomingSizeUpdated) {
      mFullScreen.getProgram().setTexSize(mIncomingWidth, mIncomingHeight);
      mIncomingSizeUpdated = false;
    }

    // Draw the video frame.
    mSurfaceTexture.getTransformMatrix(mSTMatrix);
    mFullScreen.drawFrame(mTextureId, mSTMatrix);

    // Draw a flashing box if we're recording.  This only appears on screen.
    showBox = (mRecordingStatus == RECORDING_ON);
    if (showBox && (++mFrameCount & 0x04) == 0) {
      drawBox();
    }
  }
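mRecordingEnabled is only read inside onDrawFrame(); flipping it should also happen on the renderer thread. A minimal setter is sketched below, assuming the record button's click handler forwards the new state through mGLView.queueEvent() (the method name and the call site are assumptions):

  // Sketch only: called from the UI thread via GLSurfaceView.queueEvent(), so the flag
  // is only ever touched on the renderer thread.
  public void changeRecordingState(boolean isRecording) {
    Log.d(TAG, "changeRecordingState: " + isRecording);
    mRecordingEnabled = isRecording;
  }

On the next onDrawFrame() call, the switch above notices the mismatch between mRecordingEnabled and mRecordingStatus and starts or stops the encoder accordingly.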
Code Example #4: updateFilter()
  /** Updates the filter program. */
  public void updateFilter() {
    Texture2dProgram.ProgramType programType;
    float[] kernel = null;
    float colorAdj = 0.0f;

    Log.d(TAG, "Updating filter to " + mNewFilter);
    switch (mNewFilter) {
      case CameraCaptureActivity.FILTER_NONE:
        programType = Texture2dProgram.ProgramType.TEXTURE_EXT;
        break;
      case CameraCaptureActivity.FILTER_BLACK_WHITE:
        // (In a previous version the TEXTURE_EXT_BW variant was enabled by a flag called
        // ROSE_COLORED_GLASSES, because the shader set the red channel to the B&W color
        // and green/blue to zero.)
        programType = Texture2dProgram.ProgramType.TEXTURE_EXT_BW;
        break;
      case CameraCaptureActivity.FILTER_BLUR:
        programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
        kernel =
            new float[] {
              1f / 16f, 2f / 16f, 1f / 16f,
              2f / 16f, 4f / 16f, 2f / 16f,
              1f / 16f, 2f / 16f, 1f / 16f
            };
        break;
      case CameraCaptureActivity.FILTER_SHARPEN:
        programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
        kernel = new float[] {0f, -1f, 0f, -1f, 5f, -1f, 0f, -1f, 0f};
        break;
      case CameraCaptureActivity.FILTER_EDGE_DETECT:
        programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
        kernel =
            new float[] {
              -1f, -1f, -1f,
              -1f, 8f, -1f,
              -1f, -1f, -1f
            };
        break;
      case CameraCaptureActivity.FILTER_EMBOSS:
        programType = Texture2dProgram.ProgramType.TEXTURE_EXT_FILT;
        kernel =
            new float[] {
              2f, 0f, 0f,
              0f, -1f, 0f,
              0f, 0f, -1f
            };
        colorAdj = 0.5f;
        break;
      default:
        throw new RuntimeException("Unknown filter mode " + mNewFilter);
    }

    // Do we need a whole new program?  (We want to avoid doing this if we don't have
    // to -- compiling a program could be expensive.)
    if (programType != mFullScreen.getProgram().getProgramType()) {
      mFullScreen.changeProgram(new Texture2dProgram(programType));
      // If we created a new program, we need to initialize the texture width/height.
      mIncomingSizeUpdated = true;
    }

    // Update the filter kernel (if any).
    if (kernel != null) {
      mFullScreen.getProgram().setKernel(kernel, colorAdj);
    }

    mCurrentFilter = mNewFilter;
  }
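updateFilter() is triggered indirectly: onDrawFrame() compares mCurrentFilter against mNewFilter on every frame. A minimal request method is sketched below, assuming the FILTER_* values are int constants on CameraCaptureActivity and that the UI posts the change with queueEvent() (the method name is an assumption):

  // Sketch only: request a filter change.  onDrawFrame() sees mCurrentFilter != mNewFilter
  // on the next frame and calls updateFilter() on the renderer thread.
  public void changeFilterMode(int filter) {
    mNewFilter = filter;
  }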