Example #1
  /** Video encoding is done by a MediaCodec, but here we use the buffer-to-surface method. */
  @SuppressLint({"InlinedApi", "NewApi"})
  protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {

    Log.d(TAG, "Video encoded using the MediaCodec API with a surface");

    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();

    // Estimates the framerate of the camera
    measureFramerate();

    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);

    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat =
        MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    Surface surface = mMediaCodec.createInputSurface();
    ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
    mMediaCodec.start();

    mStreaming = true;
  }
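
The snippet configures and starts the encoder but does not show how the H.264 output is drained. A minimal sketch of a drain loop, using the same pre-API-21 buffer-array style as the snippet (what is done with each access unit is an assumption):

  // Sketch only: drain encoded output on a worker thread (not part of the original snippet).
  MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
  while (mStreaming) {
    int index = mMediaCodec.dequeueOutputBuffer(info, 10000); // 10 ms timeout
    if (index >= 0) {
      ByteBuffer encoded = outputBuffers[index];
      encoded.position(info.offset);
      encoded.limit(info.offset + info.size);
      // Hand the access unit to the packetizer/stream here (assumption).
      mMediaCodec.releaseOutputBuffer(index, false);
    } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      outputBuffers = mMediaCodec.getOutputBuffers();
    } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      MediaFormat outFormat = mMediaCodec.getOutputFormat(); // carries SPS/PPS as csd-0/csd-1
    }
  }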
Example #2
  /**
   * Edits a video file, saving the contents to a new file. This involves decoding and re-encoding,
   * not to mention conversions between YUV and RGB, and so may be lossy.
   *
   * <p>If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
   * output, but it's not practical to support all OEM formats. By using a SurfaceTexture for output
   * and a Surface for input, we can avoid issues with obscure formats and can use a fragment shader
   * to do transformations.
   */
  private VideoChunks editVideoFile(VideoChunks inputData) {
    if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;

    try {
      MediaFormat inputFormat = inputData.getMediaFormat();

      // Create an encoder format that matches the input format.  (Might be able to just
      // re-use the format used to generate the video, since we want it to be the same.)
      MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
      outputFormat.setInteger(
          MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
      outputFormat.setInteger(
          MediaFormat.KEY_BIT_RATE, inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
      outputFormat.setInteger(
          MediaFormat.KEY_FRAME_RATE, inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
      outputFormat.setInteger(
          MediaFormat.KEY_I_FRAME_INTERVAL,
          inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));

      outputData.setMediaFormat(outputFormat);

      encoder = MediaCodec.createEncoderByType(MIME_TYPE);
      encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
      inputSurface = new InputSurface(encoder.createInputSurface());
      inputSurface.makeCurrent();
      encoder.start();

      // OutputSurface uses the EGL context created by InputSurface.
      decoder = MediaCodec.createDecoderByType(MIME_TYPE);
      outputSurface = new OutputSurface();
      outputSurface.changeFragmentShader(FRAGMENT_SHADER);
      decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
      decoder.start();

      editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    } finally {
      if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
      if (outputSurface != null) {
        outputSurface.release();
      }
      if (inputSurface != null) {
        inputSurface.release();
      }
      if (encoder != null) {
        encoder.stop();
        encoder.release();
      }
      if (decoder != null) {
        decoder.stop();
        decoder.release();
      }
    }

    return outputData;
  }
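
editVideoData() is not shown. Its core is the per-frame handoff from the decoder's output Surface, through the fragment shader, into the encoder's input Surface; a condensed sketch (assuming the CTS InputSurface/OutputSurface helpers with awaitNewImage(), drawImage(), setPresentationTime() and swapBuffers(); input feeding and full EOS handling omitted):

  // Sketch: move one decoded frame through the shader into the encoder.
  MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  int decoderStatus = decoder.dequeueOutputBuffer(info, 10000);
  if (decoderStatus >= 0) {
    boolean doRender = (info.size != 0);
    // Rendering releases the frame to the decoder's output Surface (a SurfaceTexture).
    decoder.releaseOutputBuffer(decoderStatus, doRender);
    if (doRender) {
      outputSurface.awaitNewImage();            // wait for the frame to land on the texture
      outputSurface.drawImage();                // redraw it with the custom fragment shader
      inputSurface.setPresentationTime(info.presentationTimeUs * 1000); // us -> ns
      inputSurface.swapBuffers();               // submit the edited frame to the encoder
    }
    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
      encoder.signalEndOfInputStream();
    }
  }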
Example #3
  @Override
  protected void prepare() throws IOException {
    if (DEBUG) Log.v(TAG, "prepare:");
    mTrackIndex = -1;
    mMuxerStarted = mIsEOS = false;
    // Prepare MediaCodec for AAC encoding of audio data from the internal mic.
    final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
    if (audioCodecInfo == null) {
      Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
      return;
    }
    if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());

    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
    audioFormat.setInteger(
        MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    //		audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
    //      audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs );
    if (DEBUG) Log.i(TAG, "format: " + audioFormat);
    mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
    mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();
    if (DEBUG) Log.i(TAG, "prepare finishing");
    if (mListener != null) {
      try {
        mListener.onPrepared(this);
      } catch (final Exception e) {
        Log.e(TAG, "prepare:", e);
      }
    }
  }
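
selectAudioCodec() is referenced but not shown. A plausible implementation on the pre-API-21 MediaCodecList (an assumption, written to match the snippet's style):

  private static MediaCodecInfo selectAudioCodec(final String mimeType) {
    final int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
      final MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
      if (!codecInfo.isEncoder()) continue; // we need an encoder, not a decoder
      for (final String type : codecInfo.getSupportedTypes()) {
        if (type.equalsIgnoreCase(mimeType)) return codecInfo;
      }
    }
    return null; // no matching encoder found
  }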
Example #4
  /**
   * Generates a test video file, saving it as VideoChunks. We generate frames with GL to avoid
   * having to deal with multiple YUV formats.
   *
   * @return true on success, false on "soft" failure
   */
  private boolean generateVideoFile(VideoChunks output) {
    if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    MediaCodec encoder = null;
    InputSurface inputSurface = null;

    try {
      MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
      if (codecInfo == null) {
        // Don't fail CTS if they don't have an AVC codec (not here, anyway).
        Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
        return false;
      }
      if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

      // We avoid the device-specific limitations on width and height by using values that
      // are multiples of 16, which all tested devices seem to be able to handle.
      MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

      // Set some properties.  Failing to specify some of these can cause the MediaCodec
      // configure() call to throw an unhelpful exception.
      format.setInteger(
          MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
      format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
      format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
      format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
      if (VERBOSE) Log.d(TAG, "format: " + format);
      output.setMediaFormat(format);

      // Create a MediaCodec for the desired codec, then configure it as an encoder with
      // our desired properties.
      encoder = MediaCodec.createByCodecName(codecInfo.getName());
      encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
      inputSurface = new InputSurface(encoder.createInputSurface());
      inputSurface.makeCurrent();
      encoder.start();

      generateVideoData(encoder, inputSurface, output);
    } finally {
      if (encoder != null) {
        if (VERBOSE) Log.d(TAG, "releasing encoder");
        encoder.stop();
        encoder.release();
        if (VERBOSE) Log.d(TAG, "released encoder");
      }
      if (inputSurface != null) {
        inputSurface.release();
      }
    }

    return true;
  }
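
generateVideoData() is not shown; its essential detail is the timestamp arithmetic. With a surface-input encoder the presentation time is attached via EGL before each buffer swap (a sketch; NUM_FRAMES and generateSurfaceFrame() are hypothetical names):

  // Sketch: render one generated frame per iteration and timestamp it.
  for (int frame = 0; frame < NUM_FRAMES; frame++) {
    // ... drain any pending encoder output into `output` here ...
    generateSurfaceFrame(frame);                       // GL drawing, not shown
    long ptsNs = frame * 1000000000L / FRAME_RATE;     // frame N is shown at N/FPS seconds
    inputSurface.setPresentationTime(ptsNs);
    inputSurface.swapBuffers();                        // submits the frame to the encoder
  }
  encoder.signalEndOfInputStream();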
Example #5
  /**
   * Creates the display surface from the encoder. The data to be encoded is fed through this
   * Surface itself.
   *
   * @return the encoder's input Surface
   * @throws IOException if the encoder cannot be created
   */
  @TargetApi(19)
  private Surface createDisplaySurface() throws IOException {
    MediaFormat mMediaFormat =
        MediaFormat.createVideoFormat(CodecUtils.MIME_TYPE, CodecUtils.WIDTH, CodecUtils.HEIGHT);
    mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, (int) (1024 * 1024 * 0.5));
    mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    mMediaFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    Log.i(TAG, "Starting encoder");
    encoder = MediaCodec.createEncoderByType(CodecUtils.MIME_TYPE);
    encoder.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

    Surface surface = encoder.createInputSurface();
    return surface;
  }
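
Judging by the name, the returned surface is meant to back a virtual display for screen capture. A hedged usage sketch (mediaProjection and screenDensity are assumptions, not part of the snippet):

  Surface inputSurface = createDisplaySurface();
  VirtualDisplay display =
      mediaProjection.createVirtualDisplay(
          "recording",                                          // debug name
          CodecUtils.WIDTH, CodecUtils.HEIGHT, screenDensity,
          DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
          inputSurface, /* callback= */ null, /* handler= */ null);
  encoder.start(); // the display now feeds frames straight into the encoder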
Example #6
 public void prepareEncode() {
   MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
   // Information describing the video format
   mediaFormat.setInteger(
       MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
   mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE); // bit rate
   mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); // frame rate
   mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); // key frame interval
   try {
     mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
     mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
     mSurface = mEncoder.createInputSurface();
     mEncoder.start();
     mediaMuxer = new MediaMuxer(filePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
   } catch (IOException e) {
     e.printStackTrace();
   }
 }
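
prepareEncode() creates the muxer but cannot start it: the track can only be added once the encoder reports its actual output format. A minimal sketch of that handshake in the drain loop (mTrackIndex and mMuxerStarted are assumed fields):

  MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
  while (true) {
    int index = mEncoder.dequeueOutputBuffer(bufferInfo, 10000);
    if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      mTrackIndex = mediaMuxer.addTrack(mEncoder.getOutputFormat());
      mediaMuxer.start();
      mMuxerStarted = true;
    } else if (index >= 0) {
      ByteBuffer data = mEncoder.getOutputBuffers()[index];
      if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        bufferInfo.size = 0; // codec config was already delivered via addTrack()
      }
      if (bufferInfo.size > 0 && mMuxerStarted) {
        data.position(bufferInfo.offset);
        data.limit(bufferInfo.offset + bufferInfo.size);
        mediaMuxer.writeSampleData(mTrackIndex, data, bufferInfo);
      }
      mEncoder.releaseOutputBuffer(index, false);
      if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
    }
  }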
 @SuppressLint("InlinedApi")
 private void maybeSetMaxInputSize(android.media.MediaFormat format, boolean codecIsAdaptive) {
   if (format.containsKey(android.media.MediaFormat.KEY_MAX_INPUT_SIZE)) {
     // Already set. The source of the format may know better, so do nothing.
     return;
   }
   int maxHeight = format.getInteger(android.media.MediaFormat.KEY_HEIGHT);
   if (codecIsAdaptive && format.containsKey(android.media.MediaFormat.KEY_MAX_HEIGHT)) {
     maxHeight = Math.max(maxHeight, format.getInteger(android.media.MediaFormat.KEY_MAX_HEIGHT));
   }
   int maxWidth = format.getInteger(android.media.MediaFormat.KEY_WIDTH);
   if (codecIsAdaptive && format.containsKey(android.media.MediaFormat.KEY_MAX_WIDTH)) {
     maxWidth = Math.max(maxWidth, format.getInteger(android.media.MediaFormat.KEY_MAX_WIDTH));
   }
   int maxPixels;
   int minCompressionRatio;
   switch (format.getString(android.media.MediaFormat.KEY_MIME)) {
     case MimeTypes.VIDEO_H263:
     case MimeTypes.VIDEO_MP4V:
       maxPixels = maxWidth * maxHeight;
       minCompressionRatio = 2;
       break;
     case MimeTypes.VIDEO_H264:
       if ("BRAVIA 4K 2015".equals(Util.MODEL)) {
         // The Sony BRAVIA 4k TV has input buffers that are too small for the calculated 4k video
         // maximum input size, so use the default value.
         return;
       }
       // Round up width/height to an integer number of macroblocks.
       maxPixels = ((maxWidth + 15) / 16) * ((maxHeight + 15) / 16) * 16 * 16;
       minCompressionRatio = 2;
       break;
     case MimeTypes.VIDEO_VP8:
       // VPX does not specify a ratio so use the values from the platform's SoftVPX.cpp.
       maxPixels = maxWidth * maxHeight;
       minCompressionRatio = 2;
       break;
     case MimeTypes.VIDEO_H265:
     case MimeTypes.VIDEO_VP9:
       maxPixels = maxWidth * maxHeight;
       minCompressionRatio = 4;
       break;
     default:
       // Leave the default max input size.
       return;
   }
   // Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames.
   int maxInputSize = (maxPixels * 3) / (2 * minCompressionRatio);
   format.setInteger(android.media.MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
 }
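
As a worked example: for 1920x1080 H.264, the macroblock rounding gives ((1920 + 15) / 16) * ((1080 + 15) / 16) * 16 * 16 = 120 * 68 * 256 = 2,088,960 pixels, so maxInputSize = 2,088,960 * 3 / (2 * 2) = 1,566,720 bytes, i.e. roughly 1.5 MiB per compressed input buffer.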
Example #8
  /**
   * Configures encoder and muxer state, and prepares the input Surface. Initializes mEncoder,
   * mMuxer, mRecordingSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
   */
  private void configureMediaCodecEncoder() {
    mBufferInfo = new MediaCodec.BufferInfo();
    MediaFormat format =
        MediaFormat.createVideoFormat(MIME_TYPE, mStreamSize.getWidth(), mStreamSize.getHeight());
    /**
     * Set encoding properties. Failing to specify some of these can cause the MediaCodec
     * configure() call to throw an exception.
     */
    format.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, mEncBitRate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    Log.i(TAG, "configure video encoding format: " + format);

    // Create/configure a MediaCodec encoder.
    mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mRecordingSurface = mEncoder.createInputSurface();
    mEncoder.start();

    String outputFileName = getOutputMediaFileName();
    if (outputFileName == null) {
      throw new IllegalStateException("Failed to get video output file");
    }

    /**
     * Create a MediaMuxer. We can't add the video track and start() the muxer until the encoder
     * starts and notifies the new media format.
     */
    try {
      mMuxer = new MediaMuxer(outputFileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException ioe) {
      throw new IllegalStateException("MediaMuxer creation failed", ioe);
    }
    mMuxerStarted = false;
  }
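
The matching teardown once recording ends mirrors this setup in reverse; a sketch under the same field names (drainEncoder() is a hypothetical stand-in for the drain logic, which the snippet does not show):

  // Sketch only: end-of-stream and orderly teardown.
  mEncoder.signalEndOfInputStream();        // valid for surface input only
  drainEncoder(/* endOfStream= */ true);    // drain until BUFFER_FLAG_END_OF_STREAM
  mEncoder.stop();
  mEncoder.release();
  mRecordingSurface.release();
  if (mMuxerStarted) {
    mMuxer.stop();                          // throws if no samples were ever written
  }
  mMuxer.release();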
Example #9
  /** Configures encoder and muxer state, and prepares the input Surface. */
  public VideoEncoderCore(int width, int height, int bitRate, Muxer muxer) throws IOException {
    mMuxer = muxer;
    mBufferInfo = new MediaCodec.BufferInfo();

    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);

    // Set some properties.  Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    if (VERBOSE) Log.d(TAG, "format: " + format);

    // Create a MediaCodec encoder, and configure it with our format.  Get a Surface
    // we can use for input and wrap it with a class that handles the EGL work.
    mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mInputSurface = mEncoder.createInputSurface();
    mEncoder.start();

    mTrackIndex = -1;
  }
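
A small accessor usually accompanies this constructor so the GL renderer can target the encoder; this one is an assumption, not shown in the snippet:

  /** Returns the encoder's input Surface (hypothetical accessor). */
  public Surface getInputSurface() {
    return mInputSurface;
  }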
Example #10
  // Pass null in |sharedContext| to configure the codec for ByteBuffer output.
  private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
    if (mediaCodecThread != null) {
      throw new RuntimeException("Forgot to release()?");
    }
    useSurface = (sharedContext != null);
    String mime = null;
    String[] supportedCodecPrefixes = null;
    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
      mime = VP8_MIME_TYPE;
      supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
      mime = H264_MIME_TYPE;
      supportedCodecPrefixes = supportedH264HwCodecPrefixes;
    } else {
      throw new RuntimeException("Non supported codec " + type);
    }
    DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
    if (properties == null) {
      throw new RuntimeException("Cannot find HW decoder for " + type);
    }
    Logging.d(
        TAG,
        "Java initDecode: "
            + type
            + " : "
            + width
            + " x "
            + height
            + ". Color: 0x"
            + Integer.toHexString(properties.colorFormat)
            + ". Use Surface: "
            + useSurface);
    if (sharedContext != null) {
      Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
    }
    mediaCodecThread = Thread.currentThread();
    try {
      Surface decodeSurface = null;
      this.width = width;
      this.height = height;
      stride = width;
      sliceHeight = height;

      if (useSurface) {
        // Create shared EGL context.
        eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
        eglBase.createDummyPbufferSurface();
        eglBase.makeCurrent();

        // Create output surface
        textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
        Logging.d(TAG, "Video decoder TextureID = " + textureID);
        surfaceTexture = new SurfaceTexture(textureID);
        surface = new Surface(surfaceTexture);
        decodeSurface = surface;
      }

      MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
      if (!useSurface) {
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
      }
      Logging.d(TAG, "  Format: " + format);
      mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
      if (mediaCodec == null) {
        return false;
      }
      mediaCodec.configure(format, decodeSurface, null, 0);
      mediaCodec.start();
      colorFormat = properties.colorFormat;
      outputBuffers = mediaCodec.getOutputBuffers();
      inputBuffers = mediaCodec.getInputBuffers();
      Logging.d(
          TAG,
          "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
      return true;
    } catch (IllegalStateException e) {
      Logging.e(TAG, "initDecode failed", e);
      return false;
    }
  }
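
A matching release path would undo this setup, stopping the codec before tearing down the GL objects (a sketch over the same fields; eglBase.release() is assumed to exist on this EglBase helper):

  private void release() {
    if (mediaCodec != null) {
      try {
        mediaCodec.stop();
      } catch (IllegalStateException e) {
        Logging.e(TAG, "stop failed", e);
      }
      mediaCodec.release();
      mediaCodec = null;
    }
    mediaCodecThread = null;
    if (useSurface) {
      surface.release();
      surfaceTexture.release();
      eglBase.release();
    }
  }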
Example #11
  /** Video encoding is done by a MediaCodec. */
  @SuppressLint("NewApi")
  protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {

    Log.d(TAG, "Video encoded using the MediaCodec API with a buffer");

    // Updates the parameters of the camera if needed
    createCamera();
    updateCamera();

    // Estimates the framerate of the camera
    measureFramerate();

    // Starts the preview if needed
    if (!mPreviewStarted) {
      try {
        mCamera.startPreview();
        mPreviewStarted = true;
      } catch (RuntimeException e) {
        destroyCamera();
        throw e;
      }
    }

    EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
    final NV21Convertor convertor = debugger.getNV21Convertor();

    mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
    MediaFormat mediaFormat =
        MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, debugger.getEncoderColorFormat());
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mMediaCodec.start();

    Camera.PreviewCallback callback =
        new Camera.PreviewCallback() {
          long now = System.nanoTime() / 1000, oldnow = now, i = 0;
          ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

          @Override
          public void onPreviewFrame(byte[] data, Camera camera) {
            oldnow = now;
            now = System.nanoTime() / 1000;
            if (i++ > 3) {
              i = 0;
              // Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
            }
            try {
              int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
              if (bufferIndex >= 0) {
                inputBuffers[bufferIndex].clear();
                if (data == null)
                  Log.e(TAG, "Symptom of the \"Callback buffer was to small\" problem...");
                else convertor.convert(data, inputBuffers[bufferIndex]);
                mMediaCodec.queueInputBuffer(
                    bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
              } else {
                Log.e(TAG, "No buffer available !");
              }
            } finally {
              mCamera.addCallbackBuffer(data);
            }
          }
        };

    for (int i = 0; i < 10; i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
    mCamera.setPreviewCallbackWithBuffer(callback);

    mStreaming = true;
  }
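
The callback above uses the pre-Lollipop ByteBuffer[] getInputBuffers() API, consistent with the rest of the snippet. On API 21+ the per-index form avoids the cached array; the body of onPreviewFrame() would then shrink to roughly this sketch:

  int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
  if (bufferIndex >= 0) {
    ByteBuffer input = mMediaCodec.getInputBuffer(bufferIndex); // API 21+, returned cleared
    convertor.convert(data, input);
    mMediaCodec.queueInputBuffer(bufferIndex, 0, input.position(), now, 0);
  }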
Example #12
  @Override
  @SuppressLint({"InlinedApi", "NewApi"})
  protected void encodeWithFFmpeg() throws IOException {

    final int bufferSize =
        AudioRecord.getMinBufferSize(
                mQuality.samplingRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT)
            * 2;

    ((AACLATMPacketizer) mPacketizer).setSamplingRate(mQuality.samplingRate);

    mAudioRecord =
        new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            mQuality.samplingRate,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize);
    mMediaCodec = MediaCodec.createEncoderByType("audio/mp4a-latm");
    MediaFormat format = new MediaFormat();
    format.setString(MediaFormat.KEY_MIME, "audio/mp4a-latm");
    format.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitRate);
    format.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    format.setInteger(MediaFormat.KEY_SAMPLE_RATE, mQuality.samplingRate);
    format.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    format.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, bufferSize);
    mMediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mAudioRecord.startRecording();
    mMediaCodec.start();

    final MediaCodecInputStream inputStream = new MediaCodecInputStream(mMediaCodec);
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();

    mThread =
        new Thread(
            new Runnable() {
              @Override
              public void run() {
                int len = 0, bufferIndex = 0;
                try {
                  while (!Thread.interrupted()) {
                    bufferIndex = mMediaCodec.dequeueInputBuffer(10000);
                    if (bufferIndex >= 0) {
                      inputBuffers[bufferIndex].clear();
                      len = mAudioRecord.read(inputBuffers[bufferIndex], bufferSize);
                      if (len == AudioRecord.ERROR_INVALID_OPERATION
                          || len == AudioRecord.ERROR_BAD_VALUE) {
                        Log.e(TAG, "An error occured with the AudioRecord API !");
                      } else {
                        // Log.v(TAG,"Pushing raw audio to the decoder: len="+len+" bs:
                        // "+inputBuffers[bufferIndex].capacity());
                        mMediaCodec.queueInputBuffer(
                            bufferIndex, 0, len, System.nanoTime() / 1000, 0);
                      }
                    }
                  }
                } catch (RuntimeException e) {
                  e.printStackTrace();
                }
              }
            });

    mThread.start();

    // The packetizer encapsulates this stream in an RTP stream and sends it
    // over the network
    mPacketizer.setInputStream(inputStream);
    mPacketizer.start();

    mStreaming = true;
  }
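
A matching stop path for this method could look like the following sketch (field names follow the snippet; the ordering is an assumption):

  mStreaming = false;
  mThread.interrupt();   // ends the feeder loop
  mPacketizer.stop();
  mMediaCodec.stop();
  mMediaCodec.release();
  mAudioRecord.stop();
  mAudioRecord.release();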