Example #1
 /**
  * Method to set byte array to the MediaCodec encoder
  *
  * @param buffer byte array holding the data to encode; may be null when only signaling EOS
  * @param length length of byte array, zero means EOS.
  * @param presentationTimeUs presentation timestamp for this buffer in microseconds
  */
 protected void encode(byte[] buffer, int length, long presentationTimeUs) {
   if (!mIsCapturing) return;
   int ix = 0, sz;
   final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    while (mIsCapturing) { // must also loop when length == 0 so the EOS flag below gets queued
     final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
     if (inputBufferIndex >= 0) {
       final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
       inputBuffer.clear();
       sz = inputBuffer.remaining();
       sz = (ix + sz < length) ? sz : length - ix;
       if (sz > 0 && (buffer != null)) {
         inputBuffer.put(buffer, ix, sz);
       }
       ix += sz;
        // if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
       if (length <= 0) {
         // send EOS
         mIsEOS = true;
         if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
         mMediaCodec.queueInputBuffer(
             inputBufferIndex, 0, 0, presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
         break;
        } else {
          mMediaCodec.queueInputBuffer(inputBufferIndex, 0, sz, presentationTimeUs, 0);
          if (ix >= length) break; // all data has been queued
        }
     } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        // wait until the MediaCodec encoder is ready to accept more input;
        // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
        // already waits for up to TIMEOUT_USEC (10 ms) on each call
     }
   }
 }
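 For context, a minimal caller sketch, not part of the original class: it assumes a blocking PCM source named audioRecord, and shows that passing length == 0 is how EOS is signaled.
 // Hypothetical capture loop illustrating the contract above (audioRecord is an assumption).
 final byte[] pcm = new byte[4096];
 while (mIsCapturing) {
   final int read = audioRecord.read(pcm, 0, pcm.length); // blocking read of raw PCM
   if (read > 0) {
     encode(pcm, read, System.nanoTime() / 1000L); // timestamp in microseconds
   }
 }
 encode(null, 0, System.nanoTime() / 1000L); // length == 0 queues BUFFER_FLAG_END_OF_STREAM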
 private final void handleStart() {
   if (DEBUG) Log.v(TAG, "handleStart:");
   synchronized (mSync) {
     if (mState != STATE_PREPARED) throw new RuntimeException("invalid state:" + mState);
     mState = STATE_PLAYING;
   }
   if (mRequestTime > 0) {
     handleSeek(mRequestTime);
   }
   previousVideoPresentationTimeUs = -1;
   mVideoInputDone = mVideoOutputDone = true;
   Thread videoThread = null;
   if (mVideoTrackIndex >= 0) {
     final MediaCodec codec = internal_start_video(mVideoMediaExtractor, mVideoTrackIndex);
     if (codec != null) {
       mVideoMediaCodec = codec;
       mVideoBufferInfo = new MediaCodec.BufferInfo();
       mVideoInputBuffers = codec.getInputBuffers();
       mVideoOutputBuffers = codec.getOutputBuffers();
     }
     mVideoInputDone = mVideoOutputDone = false;
     videoThread = new Thread(mVideoTask, "VideoTask");
   }
   if (videoThread != null) videoThread.start();
 }
 private final void handleStop() {
   if (DEBUG) Log.v(TAG, "handleStop:");
   synchronized (mVideoTask) {
     internal_stop_video();
     mVideoTrackIndex = -1;
   }
   if (mVideoMediaCodec != null) {
     mVideoMediaCodec.stop();
     mVideoMediaCodec.release();
     mVideoMediaCodec = null;
   }
   if (mVideoMediaExtractor != null) {
     mVideoMediaExtractor.release();
     mVideoMediaExtractor = null;
   }
   mVideoBufferInfo = null;
   mVideoInputBuffers = mVideoOutputBuffers = null;
   if (mMetadata != null) {
     mMetadata.release();
     mMetadata = null;
   }
   synchronized (mSync) {
     mVideoOutputDone = mVideoInputDone = true;
     mState = STATE_STOP;
   }
   mCallback.onFinished();
 }
  /**
   * Write a media sample to the decoder.
   *
   * <p>A "sample" here refers to a single atomic access unit in the media stream. The definition
   * of "access unit" is dependent on the type of encoding used, but it typically refers to a
   * single frame of video or a short segment of audio. {@link android.media.MediaExtractor}
   * extracts data from a stream one sample at a time.
   *
   * @param extractor Instance of {@link android.media.MediaExtractor} wrapping the media.
   * @param isSecure Whether the sample holds protected content that must be queued via {@link
   *     MediaCodec#queueSecureInputBuffer(int, int, MediaCodec.CryptoInfo, long, int)}.
   * @param presentationTimeUs The time, relative to the beginning of the media stream, at which
   *     this buffer should be rendered.
   * @param flags Flags to pass to the decoder. See {@link MediaCodec#queueInputBuffer(int, int,
   *     int, long, int)}
   * @return true if a sample was queued, false if no input buffer was available.
   * @throws MediaCodec.CryptoException if the sample is secure and decryption fails.
   */
  public boolean writeSample(
      final MediaExtractor extractor,
      final boolean isSecure,
      final long presentationTimeUs,
      int flags) {
    boolean result = false;

    if (!mAvailableInputBuffers.isEmpty()) {
      int index = mAvailableInputBuffers.remove();
      ByteBuffer buffer = mInputBuffers[index];

      // reads the sample from the file using extractor into the buffer
      int size = extractor.readSampleData(buffer, 0);
      if (size <= 0) {
        // readSampleData returns -1 at end of stream; queue an empty buffer with the EOS flag
        size = 0;
        flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
      }

      // Submit the buffer to the codec for decoding. The presentationTimeUs
      // indicates the position (play time) for the current sample.
      if (!isSecure) {
        mDecoder.queueInputBuffer(index, 0, size, presentationTimeUs, flags);
      } else {
        extractor.getSampleCryptoInfo(cryptoInfo);
        mDecoder.queueSecureInputBuffer(index, 0, cryptoInfo, presentationTimeUs, flags);
      }

      result = true;
    }
    return result;
  }
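 A hedged feed-loop sketch for writeSample(): mExtractor and mCodecWrapper are assumed to be a MediaExtractor with a selected track and a wrapper built by fromVideoFormat() below. The EOS case is handled inside writeSample() once readSampleData() returns -1.
 // Illustrative input loop (mExtractor and mCodecWrapper are assumptions, not fields shown here).
 boolean sawInputEOS = false;
 while (!sawInputEOS) {
   final boolean queued = mCodecWrapper.writeSample(
       mExtractor, false, mExtractor.getSampleTime(), mExtractor.getSampleFlags());
   if (queued) {
     // advance() returns false once the last sample has been read
     sawInputEOS = !mExtractor.advance();
   }
 }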
  /**
   * Constructs the {@link MediaCodecWrapper} wrapper object around the video codec. The codec is
   * created using the encapsulated information in the {@link MediaFormat} object.
   *
   * @param trackFormat The format of the media object to be decoded.
   * @param surface Surface to render the decoded frames.
   * @return A {@link MediaCodecWrapper} around the configured video codec, or null if the track
   *     is not video or codec creation failed.
   */
  public static MediaCodecWrapper fromVideoFormat(final MediaFormat trackFormat, Surface surface) {
    MediaCodecWrapper result = null;
    MediaCodec videoCodec = null;

    // BEGIN_INCLUDE(create_codec)
    final String mimeType = trackFormat.getString(MediaFormat.KEY_MIME);

    // Check to see if this is actually a video mime type. If it is, then create
    // a codec that can decode this mime type.
    if (mimeType.contains("video/")) {
      try {
        videoCodec = MediaCodec.createDecoderByType(mimeType);
        videoCodec.configure(trackFormat, surface, null, 0);
      } catch (Exception e) {
        // Codec creation or configuration failed; fall through and return null.
      }
    }

    // If codec creation was successful, then create a wrapper object around the
    // newly created codec.
    if (videoCodec != null) {
      result = new MediaCodecWrapper(videoCodec);
    }
    // END_INCLUDE(create_codec)

    return result;
  }
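 A usage sketch under stated assumptions (videoPath and surface are placeholders, not part of the original sample): select the first video track, then build the wrapper from its format.
 // Hypothetical setup code for fromVideoFormat().
 MediaExtractor extractor = new MediaExtractor();
 try {
   extractor.setDataSource(videoPath); // videoPath is a placeholder
 } catch (IOException e) {
   return;
 }
 MediaCodecWrapper codecWrapper = null;
 for (int i = 0; i < extractor.getTrackCount(); i++) {
   final MediaFormat format = extractor.getTrackFormat(i);
   final String mime = format.getString(MediaFormat.KEY_MIME);
   if (mime != null && mime.startsWith("video/")) {
     extractor.selectTrack(i);
     codecWrapper = MediaCodecWrapper.fromVideoFormat(format, surface); // surface: render target
     break;
   }
 }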
 private final void handleInputVideo() {
   long presentationTimeUs = mVideoMediaExtractor.getSampleTime();
    if (presentationTimeUs < previousVideoPresentationTimeUs) {
      // never let the presentation timestamp go backwards (an epsilon could be added here)
      presentationTimeUs = previousVideoPresentationTimeUs;
    }
   previousVideoPresentationTimeUs = presentationTimeUs;
   final boolean b =
       internal_process_input(
           mVideoMediaCodec, mVideoMediaExtractor, mVideoInputBuffers, presentationTimeUs, false);
   if (!b) {
     if (DEBUG) Log.i(TAG, "video track input reached EOS");
     while (mIsRunning) {
       final int inputBufIndex = mVideoMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
       if (inputBufIndex >= 0) {
         mVideoMediaCodec.queueInputBuffer(
             inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
         if (DEBUG) Log.v(TAG, "sent input EOS:" + mVideoMediaCodec);
         break;
       }
     }
     synchronized (mSync) {
       mVideoInputDone = true;
       mSync.notifyAll();
     }
   }
 }
Example #7
 /** Release all related objects */
 protected void release() {
   if (DEBUG) Log.d(TAG, "release:");
   try {
     mListener.onStopped(this);
   } catch (Exception e) {
     Log.e(TAG, "failed onStopped", e);
   }
   mIsCapturing = false;
   if (mMediaCodec != null) {
     try {
       mMediaCodec.stop();
       mMediaCodec.release();
       mMediaCodec = null;
     } catch (Exception e) {
       Log.e(TAG, "failed releasing MediaCodec", e);
     }
   }
   if (mMuxerStarted) {
     final MediaMuxerWrapper muxer = mWeakMuxer.get();
     if (muxer != null) {
       try {
         muxer.stop();
       } catch (Exception e) {
         Log.e(TAG, "failed stopping muxer", e);
       }
     }
   }
   mBufferInfo = null;
 }
 private MediaCodecWrapper(MediaCodec codec) {
   mDecoder = codec;
   codec.start();
   mInputBuffers = codec.getInputBuffers();
   mOutputBuffers = codec.getOutputBuffers();
   mOutputBufferInfo = new MediaCodec.BufferInfo[mOutputBuffers.length];
    mAvailableInputBuffers = new ArrayDeque<Integer>(mInputBuffers.length);
    mAvailableOutputBuffers = new ArrayDeque<Integer>(mOutputBuffers.length);
 }
 @Override
 public void onDestroy() {
   super.onDestroy();
   if (encoder != null) {
     encoder.stop();
     encoder.release();
     encoder = null;
   }
 }
  // Dequeue and return decoded output buffer info: null if no output buffer is
  // available, or an info with index -1 if an error happened.
 private DecoderOutputBufferInfo dequeueOutputBuffer(int dequeueTimeoutUs) {
   checkOnMediaCodecThread();
   try {
     MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
     int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
     while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
         || result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
       if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
         outputBuffers = mediaCodec.getOutputBuffers();
         Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
       } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
         MediaFormat format = mediaCodec.getOutputFormat();
         Logging.d(TAG, "Decoder format changed: " + format.toString());
         width = format.getInteger(MediaFormat.KEY_WIDTH);
         height = format.getInteger(MediaFormat.KEY_HEIGHT);
         if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
           colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
           Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
           // Check if new color space is supported.
           boolean validColorFormat = false;
           for (int supportedColorFormat : supportedColorList) {
             if (colorFormat == supportedColorFormat) {
               validColorFormat = true;
               break;
             }
           }
           if (!validColorFormat) {
             Logging.e(TAG, "Non supported color format");
             return new DecoderOutputBufferInfo(-1, 0, 0, -1);
           }
         }
         if (format.containsKey("stride")) {
           stride = format.getInteger("stride");
         }
         if (format.containsKey("slice-height")) {
           sliceHeight = format.getInteger("slice-height");
         }
         Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
         stride = Math.max(width, stride);
         sliceHeight = Math.max(height, sliceHeight);
       }
       result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
     }
     if (result >= 0) {
       return new DecoderOutputBufferInfo(result, info.offset, info.size, info.presentationTimeUs);
     }
     return null;
   } catch (IllegalStateException e) {
     Logging.e(TAG, "dequeueOutputBuffer failed", e);
     return new DecoderOutputBufferInfo(-1, 0, 0, -1);
   }
 }
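 A consumption sketch matching the conventions above; drainDecoder, the field names of DecoderOutputBufferInfo (index and friends) and the 10 ms timeout are assumptions.
  // Sketch of draining the decoder through dequeueOutputBuffer()'s three outcomes.
  private void drainDecoder() {
    final DecoderOutputBufferInfo out = dequeueOutputBuffer(10000); // 10 ms, an assumption
    if (out == null) {
      return; // no output available yet; try again later
    }
    if (out.index < 0) {
      throw new IllegalStateException("decoder error"); // error path is signaled as index -1
    }
    // consume outputBuffers[out.index] using out.offset / out.size / out.presentationTimeUs
    releaseOutputBuffer(out.index); // hand the buffer back to the codec (see further below)
  }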
Example #11
  // Decompiled (obfuscated) helper: posts a "media format is not supported" error message
  // to the app's handler.
  private void g() {
    a.a("formatNotSupportMsg start");
    if (a.a != null) {
      Message localMessage = new Message();
      localMessage.what = 1;
      localMessage.arg1 = HKErrorCode.ERROR_MEDIA_UNSUPPORTED.ordinal();
      Bundle localBundle = new Bundle();
      localBundle.putString("errorMesg", "media format is not supported !");
      localMessage.setData(localBundle);
      a.a.sendMessage(localMessage);
    }
    a.a("formatNotSupportMsg end");
  }
 private void releaseEncoder() {
   // Release encoder
   if (VERBOSE) {
     Log.v(TAG, "releasing encoder");
   }
    if (mEncoder != null) {
      mEncoder.stop();
      mEncoder.release();
      mEncoder = null;
    }
    if (mRecordingSurface != null) {
      mRecordingSurface.release();
      mRecordingSurface = null;
    }
 }
  /**
   * @param media_extractor the MediaExtractor positioned on the stream to decode
   * @param trackIndex index of the video track, or a negative value to skip
   * @return the started video decoder, or null if trackIndex is negative or creation failed
   */
 protected MediaCodec internal_start_video(
     final MediaExtractor media_extractor, final int trackIndex) {
   if (DEBUG) Log.v(TAG, "internal_start_video:");
   MediaCodec codec = null;
    if (trackIndex >= 0) {
      final MediaFormat format = media_extractor.getTrackFormat(trackIndex);
      final String mime = format.getString(MediaFormat.KEY_MIME);
      try {
        codec = MediaCodec.createDecoderByType(mime);
        codec.configure(format, mOutputSurface, null, 0);
        codec.start();
        if (DEBUG) Log.v(TAG, "internal_start_video:codec started");
      } catch (final IOException e) {
        // createDecoderByType throws IOException; return null so the caller can skip video
        Log.w(TAG, "internal_start_video:", e);
        codec = null;
      }
    }
   return codec;
 }
  /** Synchronize this object's state with the internal state of the wrapped MediaCodec. */
  private void update() {
    // BEGIN_INCLUDE(update_codec_state)
    int index;

    // Get valid input buffers from the codec to fill later in the same order they were
    // made available by the codec.
    while ((index = mDecoder.dequeueInputBuffer(0)) != MediaCodec.INFO_TRY_AGAIN_LATER) {
      mAvailableInputBuffers.add(index);
    }

    // Likewise with output buffers. If the output buffers have changed, start using the
    // new set of output buffers. If the output format has changed, notify listeners.
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while ((index = mDecoder.dequeueOutputBuffer(info, 0)) != MediaCodec.INFO_TRY_AGAIN_LATER) {
      switch (index) {
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
          mOutputBuffers = mDecoder.getOutputBuffers();
          mOutputBufferInfo = new MediaCodec.BufferInfo[mOutputBuffers.length];
          mAvailableOutputBuffers.clear();
          break;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
          if (mOutputFormatChangedListener != null) {
            mHandler.post(
                new Runnable() {
                  @Override
                  public void run() {
                    mOutputFormatChangedListener.outputFormatChanged(
                        MediaCodecWrapper.this, mDecoder.getOutputFormat());
                  }
                });
          }
          break;
        default:
          // Making sure the index is valid before adding to output buffers. We've already
          // handled INFO_TRY_AGAIN_LATER, INFO_OUTPUT_FORMAT_CHANGED and
          // INFO_OUTPUT_BUFFERS_CHANGED, i.e. all the other possible return codes, but
          // we assert the index value anyway to future-proof the code.
          if (index >= 0) {
            mOutputBufferInfo[index] = info;
            mAvailableOutputBuffers.add(index);
          } else {
            throw new IllegalStateException("Unknown status from dequeueOutputBuffer");
          }
          break;
      }
    }
    // END_INCLUDE(update_codec_state)

  }
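   A sketch of how update() is typically consumed inside this wrapper: refresh the codec state first, then report whether decoded output is ready. The method name peekNextOutput is an assumption, not part of the original class.
   // Hypothetical accessor built on update(); mirrors a "peek" pattern.
   public boolean peekNextOutput(MediaCodec.BufferInfo outInfo) {
     update(); // harvest any newly available input/output buffers first
     if (mAvailableOutputBuffers.isEmpty()) {
       return false; // nothing decoded yet
     }
     final int index = mAvailableOutputBuffers.peek();
     outInfo.set(
         mOutputBufferInfo[index].offset,
         mOutputBufferInfo[index].size,
         mOutputBufferInfo[index].presentationTimeUs,
         mOutputBufferInfo[index].flags);
     return true;
   }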
  private void initStream() throws IOException, IllegalArgumentException {
    L.v(TAG, "initStream called in state=" + state);
    lock.lock();
    try {
      extractor = new MediaExtractor();
      if (path != null) {
        extractor.setDataSource(path);
      } else {
        error("initStream");
        throw new IOException();
      }
      int trackNum = 0;
      final MediaFormat oFormat = extractor.getTrackFormat(trackNum);

      if (!oFormat.containsKey(MediaFormat.KEY_SAMPLE_RATE)) {
        error("initStream");
        throw new IOException("No KEY_SAMPLE_RATE");
      }
      int sampleRate = oFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);

      if (!oFormat.containsKey(MediaFormat.KEY_CHANNEL_COUNT)) {
        error("initStream");
        throw new IOException("No KEY_CHANNEL_COUNT");
      }
      int channelCount = oFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

      if (!oFormat.containsKey(MediaFormat.KEY_MIME)) {
        error("initStream");
        throw new IOException("No KEY_MIME");
      }
      final String mime = oFormat.getString(MediaFormat.KEY_MIME);

      if (!oFormat.containsKey(MediaFormat.KEY_DURATION)) {
        error("initStream");
        throw new IOException("No KEY_DURATION");
      }
      duration = oFormat.getLong(MediaFormat.KEY_DURATION);

      L.v(TAG, "Sample rate: " + sampleRate);
      L.v(TAG, "Mime type: " + mime);
      initDevice(sampleRate, channelCount);
      extractor.selectTrack(trackNum);
      codec = MediaCodec.createDecoderByType(mime);
      codec.configure(oFormat, null, null, 0);
    } finally {
      lock.unlock();
    }
  }
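   initDevice() is referenced above but not shown; below is a minimal sketch of what it plausibly does, assuming playback goes through an android.media.AudioTrack. All names here are assumptions, not the original implementation.
   // Hypothetical sketch of initDevice(): size and create an AudioTrack for 16-bit PCM output.
   private void initDevice(int sampleRate, int channelCount) {
     final int channelConfig =
         (channelCount == 1) ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
     final int minBufferSize =
         AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
     audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, channelConfig,
         AudioFormat.ENCODING_PCM_16BIT, minBufferSize, AudioTrack.MODE_STREAM);
   }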
  @Override
  protected void prepare() throws IOException {
    if (DEBUG) Log.v(TAG, "prepare:");
    mTrackIndex = -1;
    mMuxerStarted = mIsEOS = false;
    // prepare MediaCodec for AAC encoding of audio data from internal mic.
    final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
    if (audioCodecInfo == null) {
      Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
      return;
    }
    if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());

    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
    audioFormat.setInteger(
        MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    //		audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
    //      audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
    if (DEBUG) Log.i(TAG, "format: " + audioFormat);
    mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
    mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    if (DEBUG) Log.i(TAG, "prepare finishing");
    if (mListener != null) {
      try {
        mListener.onPrepared(this);
      } catch (final Exception e) {
        Log.e(TAG, "prepare:", e);
      }
    }
  }
Example #17
  /** Video encoding is done by a MediaCodec; here we use the buffer-to-surface method. */
  @SuppressLint({"InlinedApi", "NewApi"})
  protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {

    Log.d(TAG, "Video encoded using the MediaCodec API with a surface");

    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();

    // Estimates the framerate of the camera
    measureFramerate();

    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);

    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat =
        MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface surface = mMediaCodec.createInputSurface();
    ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
    mMediaCodec.start();

    mStreaming = true;
  }
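  The method above only configures and starts the encoder; a hedged sketch of the drain side follows. The 10 ms timeout and the packetizer hand-off are assumptions, and the deprecated getOutputBuffers() API is kept to match the rest of this code.
  // Illustrative drain loop for the surface-input encoder set up above.
  final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
  while (mStreaming) {
    final int index = mMediaCodec.dequeueOutputBuffer(info, 10000); // 10 ms, an assumption
    if (index >= 0) {
      final ByteBuffer encoded = outputBuffers[index];
      // hand bytes [info.offset, info.offset + info.size) to the RTP packetizer here
      mMediaCodec.releaseOutputBuffer(index, false);
    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      outputBuffers = mMediaCodec.getOutputBuffers();
    }
  }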
 @Override
 protected void onOutputFormatChanged(MediaCodec codec, android.media.MediaFormat outputFormat) {
   boolean hasCrop =
       outputFormat.containsKey(KEY_CROP_RIGHT)
           && outputFormat.containsKey(KEY_CROP_LEFT)
           && outputFormat.containsKey(KEY_CROP_BOTTOM)
           && outputFormat.containsKey(KEY_CROP_TOP);
   currentWidth =
       hasCrop
           ? outputFormat.getInteger(KEY_CROP_RIGHT) - outputFormat.getInteger(KEY_CROP_LEFT) + 1
           : outputFormat.getInteger(android.media.MediaFormat.KEY_WIDTH);
   currentHeight =
       hasCrop
           ? outputFormat.getInteger(KEY_CROP_BOTTOM) - outputFormat.getInteger(KEY_CROP_TOP) + 1
           : outputFormat.getInteger(android.media.MediaFormat.KEY_HEIGHT);
   currentPixelWidthHeightRatio = pendingPixelWidthHeightRatio;
   if (Util.SDK_INT >= 21) {
     // On API level 21 and above the decoder applies the rotation when rendering to the surface.
     // Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we need
     // to flip the width, height and pixel aspect ratio to reflect the rotation that was applied.
     if (pendingRotationDegrees == 90 || pendingRotationDegrees == 270) {
       int rotatedHeight = currentWidth;
       currentWidth = currentHeight;
       currentHeight = rotatedHeight;
       currentPixelWidthHeightRatio = 1 / currentPixelWidthHeightRatio;
     }
   } else {
     // On API level 20 and below the decoder does not apply the rotation.
     currentUnappliedRotationDegrees = pendingRotationDegrees;
   }
   // Must be applied each time the output format changes.
   codec.setVideoScalingMode(videoScalingMode);
 }
  /**
   * Create the input surface out of the encoder. The data to be encoded is fed through this
   * Surface itself.
   *
   * @return the encoder's input Surface
   * @throws IOException if the encoder cannot be created
   */
  @TargetApi(19)
  private Surface createDisplaySurface() throws IOException {
    MediaFormat mMediaFormat =
        MediaFormat.createVideoFormat(CodecUtils.MIME_TYPE, CodecUtils.WIDTH, CodecUtils.HEIGHT);
    mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, (int) (1024 * 1024 * 0.5));
    mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    mMediaFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    Log.i(TAG, "Starting encoder");
    encoder = MediaCodec.createEncoderByType(CodecUtils.MIME_TYPE);
    encoder.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

    Surface surface = encoder.createInputSurface();
    return surface;
  }
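  A sketch of wiring the returned Surface to a VirtualDisplay for screen capture; the display name, DPI and flags are assumptions. Note the ordering: createInputSurface() happens inside createDisplaySurface() (after configure), and start() must come after it.
  // Hypothetical caller; mVirtualDisplay and displayManager match the release() code below.
  Surface inputSurface = createDisplaySurface(); // configure + createInputSurface
  encoder.start();                               // start only after the input surface exists
  mVirtualDisplay = displayManager.createVirtualDisplay(
      "hypothetical-capture", CodecUtils.WIDTH, CodecUtils.HEIGHT, 320 /* dpi, assumed */,
      inputSurface, DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);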
 // Dequeue an input buffer and return its index, -1 if no input buffer is
 // available, or -2 if the codec is no longer operative.
 private int dequeueInputBuffer() {
   checkOnMediaCodecThread();
   try {
     return mediaCodec.dequeueInputBuffer(DEQUEUE_INPUT_TIMEOUT);
   } catch (IllegalStateException e) {
     Logging.e(TAG, "dequeueIntputBuffer failed", e);
     return -2;
   }
 }
 public void prepareEncode() {
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    // information describing the video format
    mediaFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE); // bit rate
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); // frame rate
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); // key frame interval
   try {
     mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
     mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
     mSurface = mEncoder.createInputSurface();
     mEncoder.start();
     mediaMuxer = new MediaMuxer(filePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
   } catch (IOException e) {
     e.printStackTrace();
   }
 }
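 prepareEncode() creates the muxer but does not start it; a hedged sketch of the matching drain loop follows. The names recording, trackIndex and muxerStarted are assumptions, and getOutputBuffer(int) assumes API 21+. The track is added and the muxer started only once the encoder reports INFO_OUTPUT_FORMAT_CHANGED.
  // Illustrative drain-to-muxer loop for the encoder prepared above.
  final MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
  while (recording) {
    final int index = mEncoder.dequeueOutputBuffer(bufferInfo, 10000); // 10 ms, an assumption
    if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      trackIndex = mediaMuxer.addTrack(mEncoder.getOutputFormat());
      mediaMuxer.start(); // only now does the muxer know the track's real format
      muxerStarted = true;
    } else if (index >= 0) {
      if (muxerStarted && bufferInfo.size > 0) {
        mediaMuxer.writeSampleData(trackIndex, mEncoder.getOutputBuffer(index), bufferInfo);
      }
      mEncoder.releaseOutputBuffer(index, false);
    }
  }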
 // Override configureCodec to provide the surface.
 @Override
 protected void configureCodec(
     MediaCodec codec,
     boolean codecIsAdaptive,
     android.media.MediaFormat format,
     MediaCrypto crypto) {
   maybeSetMaxInputSize(format, codecIsAdaptive);
   codec.configure(format, surface, crypto, 0);
 }
 protected void renderOutputBuffer(MediaCodec codec, int bufferIndex) {
   maybeNotifyVideoSizeChanged();
   TraceUtil.beginSection("releaseOutputBuffer");
   codec.releaseOutputBuffer(bufferIndex, true);
   TraceUtil.endSection();
   codecCounters.renderedOutputBufferCount++;
   renderedFirstFrame = true;
   maybeNotifyDrawnToSurface();
 }
  /** @param frameCallback callback that is notified for each decoded frame before rendering */
 private final void handleOutputVideo(final IFrameCallback frameCallback) {
    // if (DEBUG) Log.v(TAG, "handleDrainVideo:");
   while (mIsRunning && !mVideoOutputDone) {
     final int decoderStatus =
         mVideoMediaCodec.dequeueOutputBuffer(mVideoBufferInfo, TIMEOUT_USEC);
     if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
       return;
     } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
       mVideoOutputBuffers = mVideoMediaCodec.getOutputBuffers();
       if (DEBUG) Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED:");
     } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
       final MediaFormat newFormat = mVideoMediaCodec.getOutputFormat();
       if (DEBUG) Log.d(TAG, "video decoder output format changed: " + newFormat);
     } else if (decoderStatus < 0) {
       throw new RuntimeException(
           "unexpected result from video decoder.dequeueOutputBuffer: " + decoderStatus);
     } else { // decoderStatus >= 0
       boolean doRender = false;
        if (mVideoBufferInfo.size > 0) {
          doRender =
              !internal_write_video(
                  mVideoOutputBuffers[decoderStatus],
                  0,
                  mVideoBufferInfo.size,
                  mVideoBufferInfo.presentationTimeUs);
         if (doRender) {
           if (!frameCallback.onFrameAvailable(mVideoBufferInfo.presentationTimeUs))
             mVideoStartTime =
                 adjustPresentationTime(
                     mVideoSync, mVideoStartTime, mVideoBufferInfo.presentationTimeUs);
         }
       }
       mVideoMediaCodec.releaseOutputBuffer(decoderStatus, doRender);
       if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
         if (DEBUG) Log.d(TAG, "video:output EOS");
         synchronized (mSync) {
           mVideoOutputDone = true;
           mSync.notifyAll();
         }
       }
     }
   }
 }
 // Release a dequeued output buffer back to the codec for re-use.  Return
 // false if the codec is no longer operable.
 private boolean releaseOutputBuffer(int index) {
   checkOnMediaCodecThread();
   try {
     mediaCodec.releaseOutputBuffer(index, useSurface);
     return true;
   } catch (IllegalStateException e) {
     Logging.e(TAG, "releaseOutputBuffer failed", e);
     return false;
   }
 }
    void setup() {
      int width = 0, height = 0;

      extractor = new MediaExtractor();

      try {
        extractor.setDataSource(mVideoFile.getPath());
      } catch (IOException e) {
        return;
      }

       for (int i = 0; i < extractor.getTrackCount(); i++) {
         MediaFormat format = extractor.getTrackFormat(i);
         String mime = format.getString(MediaFormat.KEY_MIME);

         if (mime.startsWith("video/")) {
           // Only video tracks carry KEY_WIDTH/KEY_HEIGHT; reading them on an audio track
           // would throw, so fetch them only after the mime check.
           width = format.getInteger(MediaFormat.KEY_WIDTH);
           height = format.getInteger(MediaFormat.KEY_HEIGHT);
          extractor.selectTrack(i);
          try {
            decoder = MediaCodec.createDecoderByType(mime);
          } catch (IOException e) {
            continue;
          }
          // Decode to surface
          // decoder.configure(format, surface, null, 0);

          // Decode to offscreen surface
          surface = new CtsMediaOutputSurface(width, height);
          mMatBuffer = new MatBuffer(width, height);

          decoder.configure(format, surface.getSurface(), null, 0);
          break;
        }
      }

      if (decoder == null) {
        Log.e("VideoDecoderForOpenCV", "Can't find video info!");
        return;
      }
      valid = true;
    }
  private void release() {

    if (mEncoder != null) {
      mEncoder.stop();
      mEncoder.release();
      mEncoder = null;
    }

    if (mVirtualDisplay != null) {
      mVirtualDisplay.release();
      mVirtualDisplay = null;
    }

    displayManager = null;
    if (mediaMuxer != null) {
      mediaMuxer.stop();
      mediaMuxer.release();
      mediaMuxer = null;
    }
  }
 private boolean queueInputBuffer(int inputBufferIndex, int size, long timestampUs) {
   checkOnMediaCodecThread();
   try {
     inputBuffers[inputBufferIndex].position(0);
     inputBuffers[inputBufferIndex].limit(size);
     mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
     return true;
   } catch (IllegalStateException e) {
     Logging.e(TAG, "decode failed", e);
     return false;
   }
 }
  /**
   * Checks the video file to see if the contents match our expectations. We decode the video to a
   * Surface and check the pixels with GL.
   */
  private void checkVideoFile(VideoChunks inputData) {
    OutputSurface surface = null;
    MediaCodec decoder = null;

    mLargestColorDelta = -1;

    if (VERBOSE) Log.d(TAG, "checkVideoFile");

    try {
      surface = new OutputSurface(mWidth, mHeight);

      MediaFormat format = inputData.getMediaFormat();
      decoder = MediaCodec.createDecoderByType(MIME_TYPE);
      decoder.configure(format, surface.getSurface(), null, 0);
      decoder.start();

      int badFrames = checkVideoData(inputData, decoder, surface);
      if (badFrames != 0) {
        fail("Found " + badFrames + " bad frames");
      }
    } finally {
      if (surface != null) {
        surface.release();
      }
      if (decoder != null) {
        decoder.stop();
        decoder.release();
      }

      Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
  }
  /**
   * Feed one sample from the extractor into the codec's input buffers.
   *
   * @param codec the decoder to feed
   * @param extractor the MediaExtractor to read the sample from
   * @param inputBuffers the codec's input buffers
   * @param presentationTimeUs presentation timestamp to queue with the sample, in microseconds
   * @param isAudio true when feeding the audio track, false for video
   * @return false when the extractor has no more data (end of stream), true otherwise
   */
 protected boolean internal_process_input(
     final MediaCodec codec,
     final MediaExtractor extractor,
     final ByteBuffer[] inputBuffers,
     final long presentationTimeUs,
     final boolean isAudio) {
   //		if (DEBUG) Log.v(TAG, "internal_process_input:presentationTimeUs=" + presentationTimeUs);
   boolean result = true;
   while (mIsRunning) {
     final int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_USEC);
     if (inputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) break;
     if (inputBufIndex >= 0) {
       final int size = extractor.readSampleData(inputBuffers[inputBufIndex], 0);
       if (size > 0) {
         codec.queueInputBuffer(inputBufIndex, 0, size, presentationTimeUs, 0);
       }
       result = extractor.advance(); // return false if no data is available
       break;
     }
   }
   return result;
 }