    // Release the decoder, the offscreen output surface, and the extractor.
    void teardown() {
      if (decoder != null) {
        decoder.release();
        decoder = null;
      }
      if (surface != null) {
        surface.release();
        surface = null;
      }
      if (extractor != null) {
        extractor.release();
        extractor = null;
      }
    }
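
    // Open the video file with a MediaExtractor, select the first video track,
    // and configure a decoder that renders into an offscreen surface whose
    // frames can be read back for OpenCV processing.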
    void setup() {
      int width = 0, height = 0;

      extractor = new MediaExtractor();

      try {
        extractor.setDataSource(mVideoFile.getPath());
      } catch (IOException e) {
        Log.e("VideoDecoderForOpenCV", "Cannot open video file " + mVideoFile.getPath(), e);
        return;
      }

      for (int i = 0; i < extractor.getTrackCount(); i++) {
        MediaFormat format = extractor.getTrackFormat(i);
        String mime = format.getString(MediaFormat.KEY_MIME);

        if (mime.startsWith("video/")) {
          // Width and height are only guaranteed to be present on video tracks.
          width = format.getInteger(MediaFormat.KEY_WIDTH);
          height = format.getInteger(MediaFormat.KEY_HEIGHT);

          extractor.selectTrack(i);
          try {
            decoder = MediaCodec.createDecoderByType(mime);
          } catch (IOException e) {
            continue;
          }
          // Alternative: decode to an on-screen Surface
          // decoder.configure(format, surface, null, 0);

          // Decode to an offscreen surface so frames can be read back
          surface = new CtsMediaOutputSurface(width, height);
          mMatBuffer = new MatBuffer(width, height);

          decoder.configure(format, surface.getSurface(), null, 0);
          break;
        }
      }

      if (decoder == null) {
        Log.e("VideoDecoderForOpenCV", "Cannot find a video track!");
        return;
      }
      valid = true;
    }
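
    // Decode the whole video on this thread: feed compressed samples from the
    // extractor to the decoder, render each decoded frame to the offscreen
    // surface, and hand a decimated subset of frames to the Mat buffer.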
    @Override
    public void run() {
      setup();

      synchronized (setupSignal) {
        setupSignal.notify();
      }

      if (!valid) {
        return;
      }

      decoder.start();

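      // Pre-API-21 buffer-array access; the output buffer array is re-fetched
      // when the decoder reports INFO_OUTPUT_BUFFERS_CHANGED.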
      ByteBuffer[] inputBuffers = decoder.getInputBuffers();
      ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
      MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

      boolean isEOS = false;
      long timeoutUs = 10000; // 10 ms timeout for dequeue calls

      int iframe = 0;

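      // Main decode loop: queue input samples until end of stream and drain
      // decoded frames as they become available.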
      while (!Thread.interrupted()) {
        if (!isEOS) {
          int inIndex = decoder.dequeueInputBuffer(timeoutUs);
          if (inIndex >= 0) {
            ByteBuffer buffer = inputBuffers[inIndex];
            int sampleSize = extractor.readSampleData(buffer, 0);
            if (sampleSize < 0) {
              if (LOCAL_LOGD) {
                Log.d("VideoDecoderForOpenCV", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
              }
              decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
              isEOS = true;
            } else {
              decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
              extractor.advance();
            }
          }
        }

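        // Drain one decoded buffer (or a status code) from the decoder.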
        int outIndex = decoder.dequeueOutputBuffer(info, timeoutUs);
        MediaFormat outFormat;
        switch (outIndex) {
          case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            if (LOCAL_LOGD) {
              Log.d("VideoDecoderForOpenCV", "INFO_OUTPUT_BUFFERS_CHANGED");
            }
            outputBuffers = decoder.getOutputBuffers();
            break;
          case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            outFormat = decoder.getOutputFormat();
            if (LOCAL_LOGD) {
              Log.d("VideoDecoderForOpenCV", "New format " + outFormat);
            }
            break;
          case MediaCodec.INFO_TRY_AGAIN_LATER:
            if (LOCAL_LOGD) {
              Log.d("VideoDecoderForOpenCV", "dequeueOutputBuffer timed out!");
            }
            break;
          default:
            ByteBuffer buffer = outputBuffers[outIndex];
            boolean doRender = (info.size != 0);

            // As soon as we call releaseOutputBuffer, the buffer will be forwarded
            // to SurfaceTexture to convert to a texture.  The API doesn't
            // guarantee that the texture will be available before the call
            // returns, so we need to wait for the onFrameAvailable callback to
            // fire.  If we don't wait, we risk rendering from the previous frame.
            decoder.releaseOutputBuffer(outIndex, doRender);

            if (doRender) {
              surface.awaitNewImage();
              surface.drawImage();
              if (LOCAL_LOGD) {
                Log.d("VideoDecoderForOpenCV", "Finish drawing a frame!");
              }
              if ((iframe++ % mDecimation) == 0) {
                // Send the frame for processing
                mMatBuffer.put();
              }
            }
            break;
        }

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
          if (LOCAL_LOGD) {
            Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
          }
          break;
        }
      }
      mMatBuffer.invalidate();

      decoder.stop();

      teardown();
      mThread = null;
    }