  /** Shuts down this Muxer. Must be called from the Muxer thread. */
  private void shutdown() {
    if (!mReady || !mStarted) {
      return;
    }
    Log.i(TAG, "Shutting down");
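    // Finalize the native FFmpeg muxer output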
    mFFmpeg.finalize();
    mStarted = false;
    release();
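    // When this format is muxed on a dedicated thread, stop that thread's Looper so it can exit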
    if (formatRequiresBuffering()) {
      Looper.myLooper().quit();
    }
    getConfig().setMuxerState(EncodingConfig.MUXER_STATE.SHUTDOWN);
  }
  @Override
  public void prepare(EncodingConfig config) {
    super.prepare(config);
    getConfig().setMuxerState(EncodingConfig.MUXER_STATE.PREPARING);
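    // Reset per-session state so the muxer can be prepared again after a previous run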
    mReady = false;

    videoConfig = null;
    audioConfig = null;
    mH264Keyframe = null;
    mH264MetaSize = -1;
    mStarted = false;
    mEncoderReleased = false;
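    // Initialize the native FFmpeg muxer with the AVOptions from the current config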
    mFFmpeg.init(getConfig().getAVOptions());

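    // Scratch buffer used when prepending ADTS headers to AAC packets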
    if (formatRequiresADTS()) mCachedAudioPacket = new byte[1024];

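    // Formats that require buffering are muxed on a dedicated thread fed by per-track queues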
    if (formatRequiresBuffering()) {
      mMuxerInputQueue = new ArrayList<ArrayDeque<ByteBuffer>>();
      startMuxingThread();
    } else {
      getConfig().setMuxerState(EncodingConfig.MUXER_STATE.READY);
      mReady = true;
    }
  }
  public void handleWriteSampleData(
      MediaCodec encoder,
      int trackIndex,
      int bufferIndex,
      ByteBuffer encodedData,
      MediaCodec.BufferInfo bufferInfo) {
    super.writeSampleData(encoder, trackIndex, bufferIndex, encodedData, bufferInfo);

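    // CODEC_CONFIG buffers carry codec setup data (H.264 SPS/PPS, AAC AudioSpecificConfig), not media samples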
    if (((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0)) {
      if (VERBOSE) Log.i(TAG, "handling BUFFER_FLAG_CODEC_CONFIG for track " + trackIndex);
      if (trackIndex == mVideoTrackIndex) {
        // Capture H.264 SPS + PPS Data
        Log.d(TAG, "Capture SPS + PPS");

        captureH264MetaData(encodedData, bufferInfo);
        mFFmpeg.setVideoCodecExtraData(videoConfig, videoConfig.length);
      } else {
        captureAACMetaData(encodedData, bufferInfo);

        Log.d(TAG, "AUDIO CONFIG LENGTH: " + audioConfig.length);
        mFFmpeg.setAudioCodecExtraData(audioConfig, audioConfig.length);
      }
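      // Once both tracks have supplied extradata, connect and write the container header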
      if (videoConfig != null && audioConfig != null) {
        getConfig().setMuxerState(EncodingConfig.MUXER_STATE.CONNECTING);
        mFFmpeg.writeHeader();
      }
      releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);
      return;
    }

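    // MediaCodec emits raw AAC frames; prepend an ADTS header when the output format requires it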
    if (trackIndex == mAudioTrackIndex && formatRequiresADTS()) {
      addAdtsToByteBuffer(encodedData, bufferInfo);
    }

    // adjust the ByteBuffer values to match BufferInfo (not needed?)
    encodedData.position(bufferInfo.offset);
    encodedData.limit(bufferInfo.offset + bufferInfo.size);

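    // Convert the encoder's absolute PTS into one relative to the start of this track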
    bufferInfo.presentationTimeUs = getNextRelativePts(bufferInfo.presentationTimeUs, trackIndex);

    if (!allTracksAdded()) {
      if (trackIndex == mVideoTrackIndex) {
        Log.d(TAG, "RECEIVED VIDEO DATA NOT ALL TRACKS ADDED");
      } else {
        Log.d(TAG, "RECEIVED AUDIO DATA NOT ALL TRACKS ADDED");
      }
    }
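    // Write packets only after all tracks have been added and before every track has finished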
    if (!allTracksFinished() && allTracksAdded()) {
      boolean isVideo = trackIndex == mVideoTrackIndex;
      if (isVideo && ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0)) {
        getConfig().setMuxerState(EncodingConfig.MUXER_STATE.STREAMING);
        Log.d(TAG, "WRITING VIDEO KEYFRAME");
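        // Prepend the cached SPS/PPS so the keyframe packet is self-contained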
        packageH264Keyframe(encodedData, bufferInfo);

        mFFmpeg.writePacket(
            mH264Keyframe,
            bufferInfo.size + mH264MetaSize,
            bufferInfo.presentationTimeUs,
            cBoolean(isVideo),
            cBoolean(true));
      } else {
        Log.d(TAG, "WRITING " + (isVideo ? "VIDEO" : "AUDIO") + " DATA");
        mFFmpeg.writePacket(
            encodedData,
            bufferInfo.size,
            bufferInfo.presentationTimeUs,
            cBoolean(isVideo),
            cBoolean(false));
      }
    }
    releaseOutputBufer(encoder, encodedData, bufferIndex, trackIndex);

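    // Every track has signaled end-of-stream; tear down the muxer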
    if (allTracksFinished()) {
      shutdown();
    }
  }