    /** Dequeues one decoded audio sample from the audio codec and hands it to the playback sink. */
    private void decodeAudioSample() {
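      // negative return values are status codes (try again later / buffers changed /
      // format changed), not buffer indices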
      int output = mAudioCodec.dequeueOutputBuffer(mAudioInfo, mTimeOutUs);
      if (output >= 0) {
        // http://bigflake.com/mediacodec/#q11
        ByteBuffer outputData = mAudioCodecOutputBuffers[output];
        if (mAudioInfo.size != 0) {
          outputData.position(mAudioInfo.offset);
          outputData.limit(mAudioInfo.offset + mAudioInfo.size);
          // Log.d(TAG, "raw audio data bytes: " + mAudioInfo.size);
        }
        mAudioPlayback.write(outputData, mAudioInfo.presentationTimeUs);
        mAudioCodec.releaseOutputBuffer(output, false);

        if ((mAudioInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
          mAudioOutputEos = true;
          Log.d(TAG, "EOS audio output");
        }
      } else if (output == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        Log.d(TAG, "audio output buffers have changed.");
        mAudioCodecOutputBuffers = mAudioCodec.getOutputBuffers();
      } else if (output == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat format = mAudioCodec.getOutputFormat();
        Log.d(TAG, "audio output format has changed to " + format);
        mAudioPlayback.init(format);
      }
    }
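
    /* Decoder thread loop: configures the video codec (and the audio codec, if an
     * audio track is present), then extracts, decodes and renders samples while
     * handling pause, seeking, track format changes and A/V synchronization until
     * the stream ends or the player is released. */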
    @Override
    public void run() {
      try {
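        // create and start the video decoder; decoded frames are rendered to mSurface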
        mVideoCodec = MediaCodec.createDecoderByType(mVideoFormat.getString(MediaFormat.KEY_MIME));
        mVideoCodec.configure(mVideoFormat, mSurface, null, 0);
        mVideoCodec.start();
        mVideoCodecInputBuffers = mVideoCodec.getInputBuffers();
        mVideoCodecOutputBuffers = mVideoCodec.getOutputBuffers();
        mVideoInfo = new MediaCodec.BufferInfo();
        mVideoInputEos = false;
        mVideoOutputEos = false;

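        // set up the audio decoder and an AudioPlayback sink if the source has an audio track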
        if (mAudioFormat != null) {
          mAudioCodec =
              MediaCodec.createDecoderByType(mAudioFormat.getString(MediaFormat.KEY_MIME));
          mAudioCodec.configure(mAudioFormat, null, null, 0);
          mAudioCodec.start();
          mAudioCodecInputBuffers = mAudioCodec.getInputBuffers();
          mAudioCodecOutputBuffers = mAudioCodec.getOutputBuffers();
          mAudioInfo = new MediaCodec.BufferInfo();
          mAudioInputEos = false;
          mAudioOutputEos = false;

          mAudioPlayback = new AudioPlayback();
          mAudioPlayback.setAudioSessionId(mAudioSessionId);
          mAudioPlayback.init(mAudioFormat);
          mAudioSessionId = mAudioPlayback.getAudioSessionId();
        }

        mEventHandler.sendMessage(
            mEventHandler.obtainMessage(MEDIA_SET_VIDEO_SIZE, getVideoWidth(), getVideoHeight()));

        mBuffering = false;
        boolean preparing = true; // used on startup to process the stream until the first frame
        int frameSkipCount = 0;
        long lastPTS = 0;

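        // main decode loop: runs until the video stream reaches end of stream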
        while (!mVideoOutputEos) {
          if (mVideoExtractor.hasTrackFormatChanged()) {
            // Get new video format and restart video codec with this format
            mVideoFormat = mVideoExtractor.getTrackFormat(mVideoTrackIndex);

            mVideoCodec.stop();
            mVideoCodec.configure(mVideoFormat, mSurface, null, 0);
            mVideoCodec.start();
            mVideoCodecInputBuffers = mVideoCodec.getInputBuffers();
            mVideoCodecOutputBuffers = mVideoCodec.getOutputBuffers();

            if (mAudioFormat != null) {
              mAudioCodec.stop();
              mAudioCodec.configure(mAudioFormat, null, null, 0);
              mAudioCodec.start();
              mAudioCodecInputBuffers = mAudioCodec.getInputBuffers();
              mAudioCodecOutputBuffers = mAudioCodec.getOutputBuffers();
            }

            mEventHandler.sendMessage(
                mEventHandler.obtainMessage(
                    MEDIA_SET_VIDEO_SIZE, getVideoWidth(), getVideoHeight()));
          }

          if (mPaused && !mSeekPrepare && !mSeeking && !preparing) {
            if (mAudioPlayback != null) mAudioPlayback.pause();
            synchronized (this) {
              while (mPaused && !mSeekPrepare && !mSeeking) {
                this.wait();
              }
            }

            if (mAudioPlayback != null) mAudioPlayback.play();
            // reset time (otherwise playback tries to "catch up" time after a pause)
            mTimeBase.startAt(mVideoInfo.presentationTimeUs);
          }

          // For a seek, first prepare by seeking the media extractor and flushing the codec.
          if (mSeekPrepare) {
            Log.d(TAG, "seeking to:                 " + mSeekTargetTime);
            Log.d(TAG, "frame current position:     " + mCurrentPosition);
            Log.d(TAG, "extractor current position: " + mVideoExtractor.getSampleTime());

            mVideoExtractor.seekTo(mSeekTargetTime, MediaExtractor.SEEK_TO_PREVIOUS_SYNC);
            mCurrentPosition = mSeekTargetTime;

            Log.d(TAG, "extractor new position:     " + mVideoExtractor.getSampleTime());

            mVideoCodec.flush();
            if (mAudioFormat != null) mAudioCodec.flush();
            mSeekPrepare = false;
            mSeeking = true;

            if (mSeekMode == SeekMode.FAST_EXACT) {
              /* Check if the seek target time after the seek is the same as before the
               * seek. If not, a new seek has been issued in the meantime; the current
               * one must be discarded and a new seek with the new target time started.
               */
              long in;
              long out;
              do {
                in = mSeekTargetTime;
                out = fastSeek(in);
              } while (in != mSeekTargetTime);
              mSeekTargetTime = out;
              mSeekPrepare = false;
            }
          }

          if (!mVideoInputEos) {
            queueMediaSampleToCodec(mSeeking);
          }
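          // mVideoInfo still holds the previous frame's data here, so lastPTS records
          // the PTS of the frame before the one dequeued next (EXACT seek mode uses it
          // to step back one frame after overshooting the target)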
          lastPTS = mVideoInfo.presentationTimeUs;
          int res = mVideoCodec.dequeueOutputBuffer(mVideoInfo, mTimeOutUs);
          mVideoOutputEos =
              res >= 0 && (mVideoInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
          if (res >= 0) {
            int outputBufIndex = res;
            boolean render = true;

            if (mSeeking) {
              if (mVideoOutputEos) {
                /* When the end of stream is reached while seeking, the seek target
                 * time is set to the last frame's PTS, else the seek skips the last
                 * frame which then does not get rendered, and it might end up in a
                 * loop trying to reach the unreachable target time. */
                mSeekTargetTime = mVideoInfo.presentationTimeUs;
              }
              /* NOTE
               * This code seeks one frame too far, except if the seek time equals the
               * frame PTS:
               * (F1.....)(F2.....)(F3.....) ... (Fn.....)
               * A frame is shown for an interval, e.g. (1/fps seconds). Now if the seek
               * target time is somewhere in frame 2's interval, we end up with frame 3
               * because we need to decode it to know if the seek target time lies in
               * frame 2's interval (because we don't know the frame rate of the video,
               * nor whether it's a fixed frame rate or a variable one - even when
               * deriving it from the PTS series we cannot be sure about it). This means
               * we always end up one frame too far, because MediaCodec does not allow
               * going back, except by restarting at a sync frame.
               *
               * Solution for fixed frame rate could be to subtract the frame interval
               * time (1/fps secs) from the seek target time.
               *
               * Solution for variable frame rate and unknown frame rate: go back to sync
               * frame and re-seek to the now known exact PTS of the desired frame.
               * See EXACT mode handling below.
               */
              /* Android API compatibility:
               * Use millisecond precision to stay compatible with the VideoView API, which
               * works with millisecond precision only. Otherwise, exact seek matches are
               * missed if frames are positioned at fractions of a millisecond. */
              long presentationTimeMs = mVideoInfo.presentationTimeUs / 1000;
              long seekTargetTimeMs = mSeekTargetTime / 1000;
              if ((mSeekMode == SeekMode.PRECISE || mSeekMode == SeekMode.EXACT)
                  && presentationTimeMs < seekTargetTimeMs) {
                // this is not yet the target time, so we skip rendering this frame
                render = false;
                if (frameSkipCount == 0) {
                  Log.d(TAG, "skipping frames...");
                }
                frameSkipCount++;
              } else {
                Log.d(TAG, "frame new position:         " + mVideoInfo.presentationTimeUs);
                Log.d(TAG, "seeking finished, skipped " + frameSkipCount + " frames");
                frameSkipCount = 0;

                if (mSeekMode == SeekMode.EXACT && presentationTimeMs > seekTargetTimeMs) {
                  /* If the current frame's PTS is after the seek target time, we're
                   * one frame too far into the stream. This is because we do not know
                   * the frame rate of the video and therefore can't decide for a frame
                   * if its interval covers the seek target time or if there's already
                   * another frame coming. We know after the next frame has been
                   * decoded though if we're too far into the stream, and if so, and if
                   * EXACT mode is desired, we need to take the previous frame's PTS
                   * and repeat the seek with that PTS to arrive at the desired frame.
                   */
                  Log.d(TAG, "exact seek: repeat seek for previous frame");
                  render = false;
                  seekTo(lastPTS);
                } else {
                  if (presentationTimeMs == seekTargetTimeMs) {
                    Log.d(TAG, "exact seek match!");
                  }

                  if (mSeekMode == SeekMode.FAST_EXACT
                      && mVideoInfo.presentationTimeUs < mSeekTargetTime) {
                    // this should only ever happen in fast seek mode, else it fast-forwards in fast mode
                    Log.d(TAG, "presentation is behind, another try...");
                  } else {
                    // reset time to keep the frame rate constant (otherwise playback is too
                    // fast after backward seeks and waits for the PTS time after forward seeks)
                    mTimeBase.startAt(mVideoInfo.presentationTimeUs);
                    mCurrentPosition = mVideoInfo.presentationTimeUs;
                    mSeeking = false;
                    if (mAudioExtractor != null) {
                      mAudioExtractor.seekTo(
                          mVideoInfo.presentationTimeUs, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
                      mAudioPlayback.flush();
                    }
                    mEventHandler.sendEmptyMessage(MEDIA_SEEK_COMPLETE);
                  }
                }
              }
            } else {
              mCurrentPosition = mVideoInfo.presentationTimeUs;

              long waitingTime = mTimeBase.getOffsetFrom(mVideoInfo.presentationTimeUs);

              if (mAudioFormat != null) {
                long audioOffsetUs = mAudioPlayback.getLastPresentationTimeUs() - mCurrentPosition;
                // Log.d(TAG, "VideoPTS=" + mCurrentPosition
                //     + " AudioPTS=" + mAudioPlayback.getLastPresentationTimeUs()
                //     + " offset=" + audioOffsetUs);
                /* Synchronize the video frame PTS to the audio PTS by slowly adjusting
                 * the video frame waiting time towards a better synchronization.
                 * Directly correcting the video waiting time by the audio offset
                 * introduces too much jitter and leads to juddering video playback.
                 */
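                // adjust in 10 ms steps, and only when the A/V drift exceeds 10 ms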
                long audioOffsetCorrectionUs = 10000;
                if (audioOffsetUs > audioOffsetCorrectionUs) {
                  waitingTime -= audioOffsetCorrectionUs;
                } else if (audioOffsetUs < -audioOffsetCorrectionUs) {
                  waitingTime += audioOffsetCorrectionUs;
                }

                mAudioPlayback.setPlaybackSpeed((float) mTimeBase.getSpeed());
              }

              Log.d(TAG, "waiting time = " + waitingTime);

              /* If this is an online stream, notify the client of the buffer fill level.
               * The cached duration from the MediaExtractor returns the cached time from
               * the current position onwards, but the Android MediaPlayer returns the
               * total time consisting of the current playback point and the length of
               * the prefetched data.
               */
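              // getCachedDuration() returns -1 when no cache information is available
              // (e.g. for local playback), in which case no buffering update is sent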
              long cachedDuration = mVideoExtractor.getCachedDuration();
              if (cachedDuration != -1) {
                mEventHandler.sendMessage(
                    mEventHandler.obtainMessage(
                        MEDIA_BUFFERING_UPDATE,
                        (int)
                            (100d
                                / mVideoFormat.getLong(MediaFormat.KEY_DURATION)
                                * (mCurrentPosition + cachedDuration)),
                        0));
              }

              // slow down playback, if necessary, to keep frame rate
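              // waitingTime is in microseconds; only sleep when more than 5 ms early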
              if (waitingTime > 5000) {
                // sleep until it's time to render the next frame
                Thread.sleep(waitingTime / 1000);
              } else if (!preparing && waitingTime < 0) {
                // we need to catch up time by skipping rendering of this frame;
                // this doesn't gain enough time if the playback speed is too high and
                // the decoder is at full load
                // TODO improve fast forward mode
                mEventHandler.sendMessage(
                    mEventHandler.obtainMessage(MEDIA_INFO, MEDIA_INFO_VIDEO_TRACK_LAGGING, 0));
                mTimeBase.startAt(mVideoInfo.presentationTimeUs);
              }
            }

            if (mBuffering) {
              mBuffering = false;
              mEventHandler.sendMessage(
                  mEventHandler.obtainMessage(MEDIA_INFO, MEDIA_INFO_BUFFERING_END, 0));
            }

            mVideoCodec.releaseOutputBuffer(outputBufIndex, render); // render picture

            if (mAudioExtractor != null && !mSeeking && !mPaused) {
              // TODO rewrite; this is just a quick and dirty hack
              long start = SystemClock.elapsedRealtime();
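              // top up the audio pipeline so roughly 100 ms of decoded audio stays
              // buffered ahead of the playback head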
              while (mAudioPlayback.getBufferTimeUs() < 100000) {
                if (queueAudioSampleToCodec(mAudioExtractor)) {
                  decodeAudioSample();
                } else {
                  break;
                }
              }
              Log.d(TAG, "audio stream decode time " + (SystemClock.elapsedRealtime() - start));
            }

            if (preparing) {
              mEventHandler.sendEmptyMessage(MEDIA_PREPARED);
              preparing = false;
              mEventHandler.sendMessage(
                  mEventHandler.obtainMessage(MEDIA_INFO, MEDIA_INFO_VIDEO_RENDERING_START, 0));
            }

            if (mVideoOutputEos) {
              Log.d(TAG, "EOS video output");
              mEventHandler.sendEmptyMessage(MEDIA_PLAYBACK_COMPLETE);

              /* When playback reaches the end of the stream and the last frame is
               * displayed, we go into the paused state instead of finishing the
               * thread, and wait until a new start or seek command arrives. If no
               * command arrives, the thread stays sleeping until the player is
               * stopped.
               */
              mPaused = true;
              synchronized (this) {
                if (mAudioPlayback != null) mAudioPlayback.pause();
                while (mPaused && !mSeeking && !mSeekPrepare) {
                  this.wait();
                }
                if (mAudioPlayback != null) mAudioPlayback.play();

                // reset these flags so playback can continue
                mVideoInputEos = false;
                mVideoOutputEos = false;
                mAudioInputEos = false;
                mAudioOutputEos = false;

                // if no seek command but a start command arrived, seek to the start
                if (!mSeeking && !mSeekPrepare) {
                  seekTo(0);
                }
              }
            }
          } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            mVideoCodecOutputBuffers = mVideoCodec.getOutputBuffers();
            Log.d(TAG, "output buffers have changed.");
          } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // NOTE: this is the format of the raw video output, not the video format as specified
            // by the container
            MediaFormat oformat = mVideoCodec.getOutputFormat();
            Log.d(TAG, "output format has changed to " + oformat);
          }
        }
      } catch (InterruptedException e) {
        Log.d(TAG, "decoder interrupted");
        interrupt();
      } catch (IllegalStateException e) {
        Log.e(TAG, "decoder error, too many instances?", e);
      }

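      // release all decoder and playback resources (reached on normal exit and on error)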
      if (mAudioPlayback != null) mAudioPlayback.stopAndRelease();
      mVideoCodec.stop();
      mVideoCodec.release();
      if (mAudioFormat != null) {
        mAudioCodec.stop();
        mAudioCodec.release();
      }
      mVideoExtractor.release();

      Log.d(TAG, "decoder released");
    }