  /**
   * Write a media sample to the decoder.
   *
   * <p>A "sample" here refers to a single atomic access unit in the media stream. The definition of
   * "access unit" depends on the type of encoding used, but it typically refers to a single frame
   * of video or a few milliseconds of audio. {@link android.media.MediaExtractor} extracts data
   * from a stream one sample at a time.
   *
   * @param extractor Instance of {@link android.media.MediaExtractor} wrapping the media.
   * @param isSecure Whether the sample is DRM-protected and must be queued with {@link
   *     MediaCodec#queueSecureInputBuffer(int, int, MediaCodec.CryptoInfo, long, int)}.
   * @param presentationTimeUs The time, relative to the beginning of the media stream, at which
   *     this buffer should be rendered.
   * @param flags Flags to pass to the decoder. See {@link MediaCodec#queueInputBuffer(int, int,
   *     int, long, int)}.
   * @return true if a sample was read and queued, false if no input buffer was available.
   * @throws MediaCodec.CryptoException if a crypto error occurs while queueing a secure sample.
   */
  public boolean writeSample(
      final MediaExtractor extractor,
      final boolean isSecure,
      final long presentationTimeUs,
      int flags) {
    boolean result = false;
    boolean isEos = false;

    if (!mAvailableInputBuffers.isEmpty()) {
      int index = mAvailableInputBuffers.remove();
      ByteBuffer buffer = mInputBuffers[index];

      // Read the sample from the extractor into the codec's input buffer.
      int size = extractor.readSampleData(buffer, 0);

      // A non-positive size means the extractor is out of samples: signal end of
      // stream and queue an empty buffer rather than passing a negative size.
      if (size <= 0) {
        flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM;
        size = 0;
      }

      // Submit the buffer to the codec for decoding. The presentationTimeUs
      // indicates the position (play time) for the current sample.
      if (!isSecure) {
        mDecoder.queueInputBuffer(index, 0, size, presentationTimeUs, flags);
      } else {
        extractor.getSampleCryptoInfo(cryptoInfo);
        mDecoder.queueSecureInputBuffer(index, 0, cryptoInfo, presentationTimeUs, flags);
      }

      result = true;
    }
    return result;
  }
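
  // Illustrative sketch, not part of the original sample: one way a caller might drive
  // writeSample() from an extractor until end of stream. The helper name and the choice
  // to pass 0 for the codec flags are assumptions made for this sketch; refilling
  // mAvailableInputBuffers and draining the decoder's output buffers are assumed to
  // happen elsewhere in the class.
  private void queueAllSamples(MediaExtractor extractor) {
    boolean inputEos = false;
    while (!inputEos) {
      // writeSample() returns false when no input buffer is currently free; a real
      // player would drain output buffers here instead of retrying immediately.
      if (writeSample(extractor, false /* isSecure */, extractor.getSampleTime(), 0)) {
        // getSampleTrackIndex() returns -1 once the extractor is exhausted; in that
        // case the buffer just queued carried BUFFER_FLAG_END_OF_STREAM and we stop.
        inputEos = extractor.getSampleTrackIndex() < 0;
        if (!inputEos) {
          extractor.advance();
        }
      }
    }
  }
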
  public void setDataSource(MediaSource source) throws IOException {
    mVideoExtractor = source.getVideoExtractor();
    mAudioExtractor = source.getAudioExtractor();

    mVideoTrackIndex = -1;
    mAudioTrackIndex = -1;

    // Scan the video extractor: select the first video track and, if no dedicated
    // audio extractor is provided, also the first audio track from the same file.
    for (int i = 0; i < mVideoExtractor.getTrackCount(); ++i) {
      MediaFormat format = mVideoExtractor.getTrackFormat(i);
      Log.d(TAG, format.toString());
      String mime = format.getString(MediaFormat.KEY_MIME);
      if (mVideoTrackIndex < 0 && mime.startsWith("video/")) {
        mVideoExtractor.selectTrack(i);
        mVideoTrackIndex = i;
        mVideoFormat = format;
        mVideoMinPTS = mVideoExtractor.getSampleTime();
      } else if (mAudioExtractor == null && mAudioTrackIndex < 0 && mime.startsWith("audio/")) {
        // No dedicated audio extractor, so the audio track comes from the video extractor.
        mVideoExtractor.selectTrack(i);
        mAudioTrackIndex = i;
        mAudioFormat = format;
        mAudioMinPTS = mVideoExtractor.getSampleTime();
      }
    }

    // A dedicated audio extractor, if present, supplies the audio track instead.
    if (mAudioExtractor != null) {
      for (int i = 0; i < mAudioExtractor.getTrackCount(); ++i) {
        MediaFormat format = mAudioExtractor.getTrackFormat(i);
        Log.d(TAG, format.toString());
        String mime = format.getString(MediaFormat.KEY_MIME);
        if (mAudioTrackIndex < 0 && mime.startsWith("audio/")) {
          mAudioExtractor.selectTrack(i);
          mAudioTrackIndex = i;
          mAudioFormat = format;
          mAudioMinPTS = mAudioExtractor.getSampleTime();
        }
      }
    }

    if (mVideoFormat == null) {
      throw new IOException("no video track found");
    } else {
      if (mAudioFormat == null) {
        Log.i(TAG, "no audio track found");
      }
      if (mPlaybackThread == null) {
        if (mSurface == null) {
          Log.i(TAG, "no video output surface specified");
        }

        mPlaybackThread = new PlaybackThread();
        mPlaybackThread.start();
      }
    }
  }
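
  // Illustrative sketch, not part of the original class: the track scan in setDataSource()
  // boils down to "select the first track whose MIME type starts with a given prefix".
  // This hypothetical helper shows that pattern in isolation; the original keeps it inline
  // so it can also capture the track format and the first sample's presentation time.
  private static int firstTrackWithMimePrefix(MediaExtractor extractor, String mimePrefix) {
    for (int i = 0; i < extractor.getTrackCount(); ++i) {
      MediaFormat format = extractor.getTrackFormat(i);
      String mime = format.getString(MediaFormat.KEY_MIME);
      if (mime != null && mime.startsWith(mimePrefix)) {
        extractor.selectTrack(i);
        return i;
      }
    }
    return -1; // no matching track
  }
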
  private boolean queueAudioSampleToCodec(MediaExtractor extractor) {
    if (mAudioCodec == null) {
      throw new IllegalStateException("no audio track configured");
    }
    /* NOTE: the track index checks below are for debugging only;
     * when enabled, they prevent the EOS detection and handling further down. */
    // int trackIndex = extractor.getSampleTrackIndex();
    // if (trackIndex == -1) {
    //   throw new IllegalStateException("EOS");
    // }
    // if (trackIndex != mAudioTrackIndex) {
    //   throw new IllegalStateException("wrong track index: " + trackIndex);
    // }
    boolean sampleQueued = false;
    int inputBufIndex = mAudioCodec.dequeueInputBuffer(mTimeOutUs);
    if (inputBufIndex >= 0) {
      ByteBuffer inputBuffer = mAudioCodecInputBuffers[inputBufIndex];
      int sampleSize = extractor.readSampleData(inputBuffer, 0);
      long presentationTimeUs = 0;
      if (sampleSize < 0) {
        // No more samples: queue an empty buffer flagged as end of stream below.
        Log.d(TAG, "EOS audio input");
        mAudioInputEos = true;
        sampleSize = 0;
      } else if (sampleSize == 0) {
        // An empty sample with an empty cache means the source has stalled; report
        // that buffering has started.
        if (extractor.getCachedDuration() == 0) {
          mBuffering = true;
          mEventHandler.sendMessage(
              mEventHandler.obtainMessage(MEDIA_INFO, MEDIA_INFO_BUFFERING_START, 0));
        }
      } else {
        presentationTimeUs = extractor.getSampleTime();
        sampleQueued = true;
      }
      // Queue the sample (or an empty end-of-stream buffer) for decoding.
      mAudioCodec.queueInputBuffer(
          inputBufIndex,
          0,
          sampleSize,
          presentationTimeUs,
          mAudioInputEos ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
      // Move to the next sample unless the stream has ended.
      if (!mAudioInputEos) {
        extractor.advance();
      }
    }
    return sampleQueued;
  }
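
  // Illustrative sketch, not part of the original class: the output-side counterpart to
  // queueAudioSampleToCodec(). It dequeues one decoded buffer, releases it back to the
  // codec, and reports whether the end-of-stream flag has arrived. The method name is an
  // assumption; mAudioCodec and mTimeOutUs are the fields used above, and a real player
  // would write info.size bytes to an AudioTrack before releasing the buffer.
  private boolean dequeueAudioSampleFromCodec() {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputBufIndex = mAudioCodec.dequeueOutputBuffer(info, mTimeOutUs);
    if (outputBufIndex >= 0) {
      mAudioCodec.releaseOutputBuffer(outputBufIndex, false /* render */);
      return (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
    }
    // INFO_TRY_AGAIN_LATER and INFO_OUTPUT_FORMAT_CHANGED are ignored in this sketch.
    return false;
  }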