/**
 * Edits a video file, saving the contents to a new file. This involves decoding and re-encoding,
 * not to mention conversions between YUV and RGB, and so may be lossy.
 *
 * <p>If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats. By using a SurfaceTexture for output
 * and a Surface for input, we can avoid issues with obscure formats and can use a fragment shader
 * to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData) {
    if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    try {
        MediaFormat inputFormat = inputData.getMediaFormat();

        // Create an encoder format that matches the input format. (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
                inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
        outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
                inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
        outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
                inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
        outputData.setMediaFormat(outputFormat);

        encoder = MediaCodec.createEncoderByType(MIME_TYPE);
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();

        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.changeFragmentShader(FRAGMENT_SHADER);
        decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
        decoder.start();

        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    } finally {
        if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
        if (outputSurface != null) {
            outputSurface.release();
        }
        if (inputSurface != null) {
            inputSurface.release();
        }
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
    }
    return outputData;
}
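The body of editVideoData() is not shown above. The fragment below is a sketch of its core render step, assuming the CTS-style InputSurface/OutputSurface helpers referenced in the snippet (awaitNewImage(), drawImage(), setPresentationTime(), swapBuffers()); the BufferInfo named info and the TIMEOUT_USEC constant are also assumed locals.

// Hypothetical fragment of editVideoData(): when the decoder releases a frame to the
// SurfaceTexture, draw it through the fragment shader and submit it to the encoder's Surface.
int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
if (decoderStatus >= 0) {
    boolean doRender = (info.size != 0);
    // Releasing with render=true sends the frame to the decoder's output Surface.
    decoder.releaseOutputBuffer(decoderStatus, doRender);
    if (doRender) {
        outputSurface.awaitNewImage();       // wait for the frame to reach the SurfaceTexture
        outputSurface.drawImage();           // run the fragment shader
        inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
        inputSurface.swapBuffers();          // hand the rendered frame to the encoder
    }
    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        encoder.signalEndOfInputStream();    // propagate EOS once the last frame is submitted
    }
}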
/**
 * Generates a test video file, saving it as VideoChunks. We generate frames with GL to avoid
 * having to deal with multiple YUV formats.
 *
 * @return true on success, false on "soft" failure
 */
private boolean generateVideoFile(VideoChunks output) {
    if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return false;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);
        output.setMediaFormat(format);

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();

        generateVideoData(encoder, inputSurface, output);
    } finally {
        if (encoder != null) {
            if (VERBOSE) Log.d(TAG, "releasing encoder");
            encoder.stop();
            encoder.release();
            if (VERBOSE) Log.d(TAG, "released encoder");
        }
        if (inputSurface != null) {
            inputSurface.release();
        }
    }
    return true;
}
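The selectCodec() helper called above is not part of the snippet. A minimal sketch, assuming it follows the usual pattern of scanning MediaCodecList for an encoder that advertises the requested MIME type:

/**
 * Sketch of a selectCodec() helper (assumed, not from the original source): returns the
 * first encoder whose supported types include the given MIME type, or null if none exists.
 */
private static MediaCodecInfo selectCodec(String mimeType) {
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
        if (!codecInfo.isEncoder()) {
            continue;                        // we only want encoders here
        }
        for (String type : codecInfo.getSupportedTypes()) {
            if (type.equalsIgnoreCase(mimeType)) {
                return codecInfo;
            }
        }
    }
    return null;                             // no suitable encoder found ("soft" failure)
}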
/**
 * Creates the display surface from the encoder. Frames drawn into this Surface are fed
 * directly to the encoder.
 *
 * @return the encoder's input Surface
 * @throws IOException if the encoder cannot be created
 */
@TargetApi(19)
private Surface createDisplaySurface() throws IOException {
    MediaFormat mMediaFormat = MediaFormat.createVideoFormat(CodecUtils.MIME_TYPE,
            CodecUtils.WIDTH, CodecUtils.HEIGHT);
    mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, (int) (1024 * 1024 * 0.5));
    mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

    Log.i(TAG, "Starting encoder");
    encoder = MediaCodec.createEncoderByType(CodecUtils.MIME_TYPE);
    encoder.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

    Surface surface = encoder.createInputSurface();
    return surface;
}
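One way the returned Surface might be consumed, given the "display surface" naming, is as the target of a VirtualDisplay. The sketch below is an assumption about the surrounding code: the method name, the Context parameter, and the display name/density/flag choices are all hypothetical.

/**
 * Hypothetical consumer of createDisplaySurface(): renders a VirtualDisplay into the encoder.
 */
private void startVirtualDisplayCapture(Context context) throws IOException {
    Surface encoderSurface = createDisplaySurface();
    DisplayManager dm = (DisplayManager) context.getSystemService(Context.DISPLAY_SERVICE);
    VirtualDisplay virtualDisplay = dm.createVirtualDisplay(
            "codec-virtual-display",            // hypothetical display name
            CodecUtils.WIDTH, CodecUtils.HEIGHT,
            DisplayMetrics.DENSITY_DEFAULT,     // density chosen to match the rendered content
            encoderSurface,
            DisplayManager.VIRTUAL_DISPLAY_FLAG_PRESENTATION);
    encoder.start();                            // encoder now pulls frames from the Surface
    // virtualDisplay.release() and encoder.stop()/release() belong in the teardown path.
}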
public void prepareEncode() {
    // Describe the video format to encode.
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE);                  // bit rate
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);             // frame rate
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);  // key frame interval
    try {
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mSurface = mEncoder.createInputSurface();
        mEncoder.start();
        mediaMuxer = new MediaMuxer(filePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
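After prepareEncode(), encoded output still has to be pulled from mEncoder and written to mediaMuxer. A minimal drain-loop sketch, assuming a hypothetical drainEncoder() method and an mTrackIndex field that are not part of the original class:

/**
 * Sketch of a drain loop (assumed): pulls encoded buffers from mEncoder into mediaMuxer.
 */
private void drainEncoder(boolean endOfStream) {
    final int TIMEOUT_USEC = 10000;
    if (endOfStream) {
        mEncoder.signalEndOfInputStream();   // no more frames will be drawn into mSurface
    }
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    while (true) {
        int status = mEncoder.dequeueOutputBuffer(bufferInfo, TIMEOUT_USEC);
        if (status == MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (!endOfStream) break;         // no output yet; go feed more input
        } else if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The muxer can only be started once the encoder reports its actual output format.
            mTrackIndex = mediaMuxer.addTrack(mEncoder.getOutputFormat());
            mediaMuxer.start();
        } else if (status >= 0) {
            ByteBuffer encodedData = mEncoder.getOutputBuffer(status);
            if (bufferInfo.size > 0
                    && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                mediaMuxer.writeSampleData(mTrackIndex, encodedData, bufferInfo);
            }
            mEncoder.releaseOutputBuffer(status, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                break;                       // end of stream reached
            }
        }
    }
}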
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes mEncoder,
 * mMuxer, mRecordingSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
 */
private void configureMediaCodecEncoder() {
    mBufferInfo = new MediaCodec.BufferInfo();
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE,
            mStreamSize.getWidth(), mStreamSize.getHeight());

    // Set encoding properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an exception.
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, mEncBitRate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    Log.i(TAG, "configure video encoding format: " + format);

    // Create/configure a MediaCodec encoder.
    try {
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    } catch (IOException ioe) {
        throw new IllegalStateException("MediaCodec encoder creation failed", ioe);
    }
    mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mRecordingSurface = mEncoder.createInputSurface();
    mEncoder.start();

    String outputFileName = getOutputMediaFileName();
    if (outputFileName == null) {
        throw new IllegalStateException("Failed to get video output file");
    }

    // Create a MediaMuxer. We can't add the video track and start() the muxer until the
    // encoder starts and notifies the new media format.
    try {
        mMuxer = new MediaMuxer(outputFileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException ioe) {
        throw new IllegalStateException("MediaMuxer creation failed", ioe);
    }
    mMuxerStarted = false;
}
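A matching shutdown path is implied but not shown. The sketch below is an assumption: the method name is hypothetical, and the drain of remaining output (where mMuxer would be started on INFO_OUTPUT_FORMAT_CHANGED and mMuxerStarted set) is elided.

/**
 * Hypothetical teardown for the state set up in configureMediaCodecEncoder().
 */
private void releaseMediaCodecEncoder() {
    if (mEncoder != null) {
        mEncoder.signalEndOfInputStream();   // valid because the encoder input is a Surface
        // ... drain remaining encoded output into mMuxer here (not shown) ...
        mEncoder.stop();
        mEncoder.release();
        mEncoder = null;
    }
    if (mRecordingSurface != null) {
        mRecordingSurface.release();
        mRecordingSurface = null;
    }
    if (mMuxer != null) {
        if (mMuxerStarted) {
            mMuxer.stop();                   // stop() is only legal after start()
        }
        mMuxer.release();
        mMuxer = null;
        mMuxerStarted = false;
    }
}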