/**
 * Edits a video file, saving the contents to a new file. This involves decoding and re-encoding,
 * not to mention conversions between YUV and RGB, and so may be lossy.
 *
 * <p>If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats. By using a SurfaceTexture for output
 * and a Surface for input, we can avoid issues with obscure formats and can use a fragment shader
 * to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData) {
    if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
    VideoChunks outputData = new VideoChunks();
    MediaCodec decoder = null;
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    OutputSurface outputSurface = null;
    try {
        MediaFormat inputFormat = inputData.getMediaFormat();

        // Create an encoder format that matches the input format. (Might be able to just
        // re-use the format used to generate the video, since we want it to be the same.)
        MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        outputFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        outputFormat.setInteger(MediaFormat.KEY_BIT_RATE,
                inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
        outputFormat.setInteger(MediaFormat.KEY_FRAME_RATE,
                inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
        outputFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,
                inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
        outputData.setMediaFormat(outputFormat);

        encoder = MediaCodec.createEncoderByType(MIME_TYPE);
        encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();

        // OutputSurface uses the EGL context created by InputSurface.
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        outputSurface = new OutputSurface();
        outputSurface.changeFragmentShader(FRAGMENT_SHADER);
        decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
        decoder.start();

        editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
    } finally {
        if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
        if (outputSurface != null) {
            outputSurface.release();
        }
        if (inputSurface != null) {
            inputSurface.release();
        }
        if (encoder != null) {
            encoder.stop();
            encoder.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
    }
    return outputData;
}
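// The FRAGMENT_SHADER constant used above is not shown in this snippet. A plausible
// definition, in the spirit of the CTS decode-edit-encode test, simply swizzles the
// color channels so the "edit" is easy to verify. Treat this as an illustrative sketch,
// not necessarily the exact shader this code was written against.
private static final String FRAGMENT_SHADER =
        "#extension GL_OES_EGL_image_external : require\n" +
        "precision mediump float;\n" +
        "varying vec2 vTextureCoord;\n" +
        "uniform samplerExternalOES sTexture;\n" +
        "void main() {\n" +
        "  gl_FragColor = texture2D(sTexture, vTextureCoord).rbga;\n" + // swap green/blue
        "}\n";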
/**
 * Checks the video file to see if the contents match our expectations. We decode the video to a
 * Surface and check the pixels with GL.
 */
private void checkVideoFile(VideoChunks inputData) {
    OutputSurface surface = null;
    MediaCodec decoder = null;
    mLargestColorDelta = -1;
    if (VERBOSE) Log.d(TAG, "checkVideoFile");
    try {
        surface = new OutputSurface(mWidth, mHeight);
        MediaFormat format = inputData.getMediaFormat();
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);
        decoder.configure(format, surface.getSurface(), null, 0);
        decoder.start();
        int badFrames = checkVideoData(inputData, decoder, surface);
        if (badFrames != 0) {
            fail("Found " + badFrames + " bad frames");
        }
    } finally {
        if (surface != null) {
            surface.release();
        }
        if (decoder != null) {
            decoder.stop();
            decoder.release();
        }
        Log.i(TAG, "Largest color delta: " + mLargestColorDelta);
    }
}
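// checkVideoData() is not shown. The "check the pixels with GL" step presumably reads
// rendered frames back with glReadPixels; the helper below is a hypothetical sketch of
// one such probe (the method name, sample point, and tolerance handling are assumptions,
// not part of the original test).
private boolean pixelApproximatelyMatches(int x, int y,
        int expectedR, int expectedG, int expectedB, int tolerance) {
    ByteBuffer pixel = ByteBuffer.allocateDirect(4).order(ByteOrder.nativeOrder());
    // Read one RGBA pixel from the current EGL draw surface.
    GLES20.glReadPixels(x, y, 1, 1, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, pixel);
    int r = pixel.get(0) & 0xff;
    int g = pixel.get(1) & 0xff;
    int b = pixel.get(2) & 0xff;
    return Math.abs(r - expectedR) <= tolerance
            && Math.abs(g - expectedG) <= tolerance
            && Math.abs(b - expectedB) <= tolerance;
}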
private MediaCodecWrapper(MediaCodec codec) {
    mDecoder = codec;
    codec.start();
    mInputBuffers = codec.getInputBuffers();
    mOutputBuffers = codec.getOutputBuffers();
    mOutputBufferInfo = new MediaCodec.BufferInfo[mOutputBuffers.length];
    // Size each deque to match the buffer array it tracks; ArrayDeque grows on demand,
    // so the capacity is only a hint.
    mAvailableInputBuffers = new ArrayDeque<Integer>(mInputBuffers.length);
    mAvailableOutputBuffers = new ArrayDeque<Integer>(mOutputBuffers.length);
}
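// Hedged sketch of how the available-buffer deques above are typically filled: dequeue
// indices from the codec and stash them until a caller wants one. The method name and
// the zero (non-blocking) timeouts are illustrative, not part of the original wrapper.
private void updateAvailableBuffers() {
    int inputIndex = mDecoder.dequeueInputBuffer(0 /* don't block */);
    if (inputIndex >= 0) {
        mAvailableInputBuffers.add(inputIndex);
    }
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int outputIndex = mDecoder.dequeueOutputBuffer(info, 0 /* don't block */);
    if (outputIndex >= 0) {
        // Keep the BufferInfo so the consumer knows the size/flags of this output.
        mOutputBufferInfo[outputIndex] = info;
        mAvailableOutputBuffers.add(outputIndex);
    }
}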
/**
 * Generates a test video file, saving it as VideoChunks. We generate frames with GL to avoid
 * having to deal with multiple YUV formats.
 *
 * @return true on success, false on "soft" failure
 */
private boolean generateVideoFile(VideoChunks output) {
    if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
    MediaCodec encoder = null;
    InputSurface inputSurface = null;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            // Don't fail CTS if they don't have an AVC codec (not here, anyway).
            Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
            return false;
        }
        if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

        // We avoid the device-specific limitations on width and height by using values that
        // are multiples of 16, which all tested devices seem to be able to handle.
        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

        // Set some properties. Failing to specify some of these can cause the MediaCodec
        // configure() call to throw an unhelpful exception.
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        if (VERBOSE) Log.d(TAG, "format: " + format);
        output.setMediaFormat(format);

        // Create a MediaCodec for the desired codec, then configure it as an encoder with
        // our desired properties.
        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        inputSurface = new InputSurface(encoder.createInputSurface());
        inputSurface.makeCurrent();
        encoder.start();

        generateVideoData(encoder, inputSurface, output);
    } finally {
        if (encoder != null) {
            if (VERBOSE) Log.d(TAG, "releasing encoder");
            encoder.stop();
            encoder.release();
            if (VERBOSE) Log.d(TAG, "released encoder");
        }
        if (inputSurface != null) {
            inputSurface.release();
        }
    }
    return true;
}
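// generateVideoData() is not shown above. A minimal sketch of the frame-generation side:
// draw each frame with GL into the encoder's input surface, stamp a presentation time
// (EGL wants nanoseconds), and submit it with swapBuffers(). Draining the encoded output
// into `output` is elided; see the drain-loop sketch after prepareEncode() below. The
// per-frame shading and the 132 us offset are illustrative choices, not from the source.
private void generateFrames(MediaCodec encoder, InputSurface inputSurface, int numFrames) {
    for (int frame = 0; frame < numFrames; frame++) {
        // Trivially distinguishable frames: fade from blue to red.
        float shade = (float) frame / numFrames;
        GLES20.glClearColor(shade, 0f, 1f - shade, 1f);
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
        long ptsUs = 132 + frame * 1_000_000L / FRAME_RATE;
        inputSurface.setPresentationTime(ptsUs * 1000); // microseconds to nanoseconds
        inputSurface.swapBuffers(); // submits the frame to the encoder
    }
    encoder.signalEndOfInputStream();
}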
/**
 * Creates and starts a video decoder for the specified track.
 *
 * @param media_extractor extractor positioned on the source, used to query the track format
 * @param trackIndex index of the video track, or a negative value to skip video
 * @return the started MediaCodec decoder, or null if trackIndex is negative
 */
protected MediaCodec internal_start_video(final MediaExtractor media_extractor,
        final int trackIndex) throws IOException {
    if (DEBUG) Log.v(TAG, "internal_start_video:");
    MediaCodec codec = null;
    if (trackIndex >= 0) {
        final MediaFormat format = media_extractor.getTrackFormat(trackIndex);
        final String mime = format.getString(MediaFormat.KEY_MIME);
        codec = MediaCodec.createDecoderByType(mime); // may throw IOException
        codec.configure(format, mOutputSurface, null, 0);
        codec.start();
        if (DEBUG) Log.v(TAG, "internal_start_video:codec started");
    }
    return codec;
}
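// Hypothetical caller for internal_start_video(): pick the first "video/" track from a
// MediaExtractor and start a decoder for it. The method name and sourcePath parameter
// are illustrative; only the extractor and decoder calls mirror the snippet above.
protected MediaCodec startFirstVideoTrack(final String sourcePath) throws IOException {
    final MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(sourcePath);
    for (int i = 0; i < extractor.getTrackCount(); i++) {
        final String mime = extractor.getTrackFormat(i).getString(MediaFormat.KEY_MIME);
        if (mime != null && mime.startsWith("video/")) {
            extractor.selectTrack(i); // samples will be read from this track
            return internal_start_video(extractor, i);
        }
    }
    return null; // no video track found
}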
public void prepareEncode() {
    // Describes the format of the video to encode.
    MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE); // bit rate
    mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); // frame rate
    mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); // keyframe interval
    try {
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
        mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mSurface = mEncoder.createInputSurface();
        mEncoder.start();
        mediaMuxer = new MediaMuxer(filePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
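// prepareEncode() only sets the pipeline up; something still has to drain the encoder
// and feed the muxer. A sketch of that loop, assuming mTrackIndex and mMuxerStarted
// fields exist alongside the fields used above; the timeout value and the endOfStream
// handling are illustrative:
private void drainEncoder(boolean endOfStream) {
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    while (true) {
        int index = mEncoder.dequeueOutputBuffer(bufferInfo, 10000 /* us */);
        if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (!endOfStream) break; // no output yet; come back later
        } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The muxer can only be started once the encoder reports its real format.
            mTrackIndex = mediaMuxer.addTrack(mEncoder.getOutputFormat());
            mediaMuxer.start();
            mMuxerStarted = true;
        } else if (index >= 0) {
            ByteBuffer data = mEncoder.getOutputBuffer(index);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                bufferInfo.size = 0; // codec config already reached the muxer via addTrack()
            }
            if (bufferInfo.size > 0 && mMuxerStarted) {
                data.position(bufferInfo.offset);
                data.limit(bufferInfo.offset + bufferInfo.size);
                mediaMuxer.writeSampleData(mTrackIndex, data, bufferInfo);
            }
            mEncoder.releaseOutputBuffer(index, false);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) break;
        }
    }
}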
@TargetApi(19)
public void startDisplayManager() {
    DisplayManager mDisplayManager = (DisplayManager) getSystemService(Context.DISPLAY_SERVICE);
    Surface encoderInputSurface = null;
    try {
        encoderInputSurface = createDisplaySurface();
    } catch (IOException e) {
        e.printStackTrace();
    }
    if (Build.VERSION.SDK_INT < Build.VERSION_CODES.LOLLIPOP) {
        virtualDisplay = mDisplayManager.createVirtualDisplay("Remote Droid",
                CodecUtils.WIDTH, CodecUtils.HEIGHT, 50 /* dpi */,
                encoderInputSurface,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_PUBLIC
                        | DisplayManager.VIRTUAL_DISPLAY_FLAG_SECURE);
    } else {
        if (MainActivity.mMediaProjection != null) {
            virtualDisplay = MainActivity.mMediaProjection.createVirtualDisplay("Remote Droid",
                    CodecUtils.WIDTH, CodecUtils.HEIGHT, 50 /* dpi */,
                    DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
                    encoderInputSurface, null /* callback */, null /* handler */);
        } else {
            showToast("Something went wrong. Please restart the app.");
        }
    }
    encoder.start();
}
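// createDisplaySurface() is referenced but not shown. A plausible implementation, in line
// with the constants above, configures the encoder and hands back its input surface for
// the virtual display to render into; the MIME type, bit rate, and frame rate here are
// placeholders. Note the surface must come after configure() but before the
// encoder.start() call in startDisplayManager().
private Surface createDisplaySurface() throws IOException {
    MediaFormat format = MediaFormat.createVideoFormat("video/avc",
            CodecUtils.WIDTH, CodecUtils.HEIGHT);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 2_000_000); // placeholder
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30); // placeholder
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
    encoder = MediaCodec.createEncoderByType("video/avc");
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    return encoder.createInputSurface();
}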
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes mEncoder,
 * mMuxer, mRecordingSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
 */
private void configureMediaCodecEncoder() {
    mBufferInfo = new MediaCodec.BufferInfo();
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE,
            mStreamSize.getWidth(), mStreamSize.getHeight());

    // Set encoding properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an exception.
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, mEncBitRate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    Log.i(TAG, "configure video encoding format: " + format);

    // Create/configure a MediaCodec encoder.
    try {
        mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    } catch (IOException ioe) {
        throw new IllegalStateException("MediaCodec creation failed", ioe);
    }
    mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mRecordingSurface = mEncoder.createInputSurface();
    mEncoder.start();

    String outputFileName = getOutputMediaFileName();
    if (outputFileName == null) {
        throw new IllegalStateException("Failed to get video output file");
    }

    // Create a MediaMuxer. We can't add the video track and start() the muxer until the
    // encoder starts and notifies the new media format.
    try {
        mMuxer = new MediaMuxer(outputFileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    } catch (IOException ioe) {
        throw new IllegalStateException("MediaMuxer creation failed", ioe);
    }
    mMuxerStarted = false;
}
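// The handshake described in the comment above, in minimal form: the muxer's track can
// only be added once the encoder emits INFO_OUTPUT_FORMAT_CHANGED. This is a sketch; the
// timeout is arbitrary, other dequeue results are elided here, and a full drain loop is
// shown after prepareEncode() above.
private void startMuxerIfFormatKnown() {
    int status = mEncoder.dequeueOutputBuffer(mBufferInfo, 10000 /* us */);
    if (status == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // getOutputFormat() now carries the codec-specific data the muxer needs.
        mTrackIndex = mMuxer.addTrack(mEncoder.getOutputFormat());
        mMuxer.start();
        mMuxerStarted = true;
    }
}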
// Pass null in |sharedContext| to configure the codec for ByteBuffer output.
private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
    if (mediaCodecThread != null) {
        throw new RuntimeException("Forgot to release()?");
    }
    useSurface = (sharedContext != null);
    String mime = null;
    String[] supportedCodecPrefixes = null;
    if (type == VideoCodecType.VIDEO_CODEC_VP8) {
        mime = VP8_MIME_TYPE;
        supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
    } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
        mime = H264_MIME_TYPE;
        supportedCodecPrefixes = supportedH264HwCodecPrefixes;
    } else {
        throw new RuntimeException("Non supported codec " + type);
    }
    DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
    if (properties == null) {
        throw new RuntimeException("Cannot find HW decoder for " + type);
    }
    Logging.d(TAG, "Java initDecode: " + type + " : " + width + " x " + height
            + ". Color: 0x" + Integer.toHexString(properties.colorFormat)
            + ". Use Surface: " + useSurface);
    if (sharedContext != null) {
        Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
    }
    mediaCodecThread = Thread.currentThread();
    try {
        Surface decodeSurface = null;
        this.width = width;
        this.height = height;
        stride = width;
        sliceHeight = height;

        if (useSurface) {
            // Create shared EGL context.
            eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
            eglBase.createDummyPbufferSurface();
            eglBase.makeCurrent();

            // Create output surface.
            textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
            Logging.d(TAG, "Video decoder TextureID = " + textureID);
            surfaceTexture = new SurfaceTexture(textureID);
            surface = new Surface(surfaceTexture);
            decodeSurface = surface;
        }

        MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
        if (!useSurface) {
            format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
        }
        Logging.d(TAG, "  Format: " + format);
        mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
        if (mediaCodec == null) {
            return false;
        }
        mediaCodec.configure(format, decodeSurface, null, 0);
        mediaCodec.start();
        colorFormat = properties.colorFormat;
        outputBuffers = mediaCodec.getOutputBuffers();
        inputBuffers = mediaCodec.getInputBuffers();
        Logging.d(TAG, "Input buffers: " + inputBuffers.length
                + ". Output buffers: " + outputBuffers.length);
        return true;
    } catch (IllegalStateException e) {
        Logging.e(TAG, "initDecode failed", e);
        return false;
    }
}
@CalledByNative
private static boolean decodeAudioFile(Context ctx, int nativeMediaCodecBridge,
        int inputFD, long dataSize) {
    if (dataSize < 0 || dataSize > 0x7fffffff) return false;

    MediaExtractor extractor = new MediaExtractor();
    ParcelFileDescriptor encodedFD;
    encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
    try {
        extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
    } catch (Exception e) {
        e.printStackTrace();
        encodedFD.detachFd();
        return false;
    }

    if (extractor.getTrackCount() <= 0) {
        encodedFD.detachFd();
        return false;
    }

    MediaFormat format = extractor.getTrackFormat(0);

    // Number of channels specified in the file.
    int inputChannelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

    // Number of channels the decoder will provide. (Not necessarily the same as
    // inputChannelCount. See crbug.com/266006.)
    int outputChannelCount = inputChannelCount;

    int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    String mime = format.getString(MediaFormat.KEY_MIME);

    long durationMicroseconds = 0;
    if (format.containsKey(MediaFormat.KEY_DURATION)) {
        try {
            durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
        } catch (Exception e) {
            Log.d(LOG_TAG, "Cannot get duration");
        }
    }

    if (DEBUG) {
        Log.d(LOG_TAG, "Tracks: " + extractor.getTrackCount()
                + " Rate: " + sampleRate
                + " Channels: " + inputChannelCount
                + " Mime: " + mime
                + " Duration: " + durationMicroseconds + " microsec");
    }

    nativeInitializeDestination(nativeMediaCodecBridge, inputChannelCount,
            sampleRate, durationMicroseconds);

    // Create decoder.
    MediaCodec codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
    codec.start();

    ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
    ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();

    // A track must be selected and will be used to read samples.
    extractor.selectTrack(0);

    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;

    // Keep processing until the output is done.
    while (!sawOutputEOS) {
        if (!sawInputEOS) {
            // Input side.
            int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
            if (inputBufIndex >= 0) {
                ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
                int sampleSize = extractor.readSampleData(dstBuf, 0);
                long presentationTimeMicroSec = 0;
                if (sampleSize < 0) {
                    sawInputEOS = true;
                    sampleSize = 0;
                } else {
                    presentationTimeMicroSec = extractor.getSampleTime();
                }
                codec.queueInputBuffer(inputBufIndex, 0 /* offset */, sampleSize,
                        presentationTimeMicroSec,
                        sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
                if (!sawInputEOS) {
                    extractor.advance();
                }
            }
        }

        // Output side.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);

        if (outputBufIndex >= 0) {
            ByteBuffer buf = codecOutputBuffers[outputBufIndex];
            if (info.size > 0) {
                nativeOnChunkDecoded(nativeMediaCodecBridge, buf, info.size,
                        inputChannelCount, outputChannelCount);
            }
            buf.clear();
            codec.releaseOutputBuffer(outputBufIndex, false /* render */);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                sawOutputEOS = true;
            }
        } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = codec.getOutputBuffers();
        } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat newFormat = codec.getOutputFormat();
            outputChannelCount = newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            Log.d(LOG_TAG, "output format changed to " + newFormat);
        }
    }

    encodedFD.detachFd();
    codec.stop();
    codec.release();
    codec = null;
    return true;
}
@Override
public void run() {
    setup();
    synchronized (setupSignal) {
        setupSignal.notify();
    }
    if (!valid) {
        return;
    }

    decoder.start();
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();

    boolean isEOS = false;
    long startMs = System.currentTimeMillis();
    long timeoutUs = 10000;
    int iframe = 0;

    while (!Thread.interrupted()) {
        if (!isEOS) {
            int inIndex = decoder.dequeueInputBuffer(10000);
            if (inIndex >= 0) {
                ByteBuffer buffer = inputBuffers[inIndex];
                int sampleSize = extractor.readSampleData(buffer, 0);
                if (sampleSize < 0) {
                    if (LOCAL_LOGD) {
                        Log.d("VideoDecoderForOpenCV", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                    }
                    decoder.queueInputBuffer(inIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    isEOS = true;
                } else {
                    decoder.queueInputBuffer(inIndex, 0, sampleSize,
                            extractor.getSampleTime(), 0);
                    extractor.advance();
                }
            }
        }

        int outIndex = decoder.dequeueOutputBuffer(info, 10000);
        MediaFormat outFormat;
        switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                if (LOCAL_LOGD) {
                    Log.d("VideoDecoderForOpenCV", "INFO_OUTPUT_BUFFERS_CHANGED");
                }
                outputBuffers = decoder.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                outFormat = decoder.getOutputFormat();
                if (LOCAL_LOGD) {
                    Log.d("VideoDecoderForOpenCV", "New format " + outFormat);
                }
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                if (LOCAL_LOGD) {
                    Log.d("VideoDecoderForOpenCV", "dequeueOutputBuffer timed out!");
                }
                break;
            default:
                ByteBuffer buffer = outputBuffers[outIndex];
                boolean doRender = (info.size != 0);
                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't
                // guarantee that the texture will be available before the call
                // returns, so we need to wait for the onFrameAvailable callback to
                // fire. If we don't wait, we risk rendering from the previous frame.
                decoder.releaseOutputBuffer(outIndex, doRender);
                if (doRender) {
                    surface.awaitNewImage();
                    surface.drawImage();
                    if (LOCAL_LOGD) {
                        Log.d("VideoDecoderForOpenCV", "Finish drawing a frame!");
                    }
                    if ((iframe++ % mDecimation) == 0) {
                        // Send the frame for processing.
                        mMatBuffer.put();
                    }
                }
                break;
        }

        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            if (LOCAL_LOGD) {
                Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
            }
            break;
        }
    }

    mMatBuffer.invalidate();
    decoder.stop();
    teardown();
    mThread = null;
}
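// The comment in the loop above relies on surface.awaitNewImage() blocking until the
// SurfaceTexture's onFrameAvailable() callback fires. A plausible implementation of that
// pair, in the style of the well-known CTS/grafika OutputSurface helper; the field names
// and the 500 ms timeout are assumptions:
private final Object mFrameSyncObject = new Object();
private boolean mFrameAvailable;

@Override
public void onFrameAvailable(SurfaceTexture st) {
    synchronized (mFrameSyncObject) {
        mFrameAvailable = true;
        mFrameSyncObject.notifyAll();
    }
}

public void awaitNewImage() {
    final int TIMEOUT_MS = 500;
    synchronized (mFrameSyncObject) {
        while (!mFrameAvailable) {
            try {
                // Wait for onFrameAvailable() to signal us; bail out on timeout so a
                // stalled decoder doesn't hang the caller forever.
                mFrameSyncObject.wait(TIMEOUT_MS);
                if (!mFrameAvailable) {
                    throw new RuntimeException("frame wait timed out");
                }
            } catch (InterruptedException ie) {
                throw new RuntimeException(ie);
            }
        }
        mFrameAvailable = false;
    }
    // Latch the new frame into the texture so the next drawImage() samples fresh data.
    mSurfaceTexture.updateTexImage();
}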
@Override
public void run() {
    isDecoding = true;
    codec.start();
    @SuppressWarnings("deprecation")
    ByteBuffer[] inputBuffers = codec.getInputBuffers();
    @SuppressWarnings("deprecation")
    ByteBuffer[] outputBuffers = codec.getOutputBuffers();
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    while (!sawInputEOS && !sawOutputEOS && continuing) {
        if (state == State.PAUSED) {
            try {
                synchronized (decoderLock) {
                    decoderLock.wait();
                }
            } catch (InterruptedException e) {
                // Purposely not doing anything here
            }
            continue;
        }
        if (sonic != null) {
            sonic.setSpeed(speed);
            sonic.setPitch(1);
        }

        int inputBufIndex = codec.dequeueInputBuffer(200);
        if (inputBufIndex >= 0) {
            ByteBuffer dstBuf = inputBuffers[inputBufIndex];
            int sampleSize = extractor.readSampleData(dstBuf, 0);
            long presentationTimeUs = 0;
            if (sampleSize < 0) {
                sawInputEOS = true;
                sampleSize = 0;
            } else {
                presentationTimeUs = extractor.getSampleTime();
            }
            codec.queueInputBuffer(inputBufIndex, 0, sampleSize, presentationTimeUs,
                    sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            if (flushCodec) {
                codec.flush();
                flushCodec = false;
            }
            if (!sawInputEOS) {
                extractor.advance();
            }
        }

        final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        byte[] modifiedSamples = new byte[info.size];
        int res;
        //noinspection deprecation
        do {
            res = codec.dequeueOutputBuffer(info, 200);
            if (res >= 0) {
                final byte[] chunk = new byte[info.size];
                outputBuffers[res].get(chunk);
                outputBuffers[res].clear();
                if (chunk.length > 0) {
                    sonic.writeBytesToStream(chunk, chunk.length);
                } else {
                    sonic.flushStream();
                }
                int available = sonic.availableBytes();
                if (available > 0) {
                    if (modifiedSamples.length < available) {
                        modifiedSamples = new byte[available];
                    }
                    sonic.readBytesFromStream(modifiedSamples, available);
                    track.write(modifiedSamples, 0, available);
                }
                codec.releaseOutputBuffer(res, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }
            } else if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                //noinspection deprecation
                outputBuffers = codec.getOutputBuffers();
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                track.stop();
                lock.lock();
                try {
                    track.release();
                    final MediaFormat oFormat = codec.getOutputFormat();
                    initDevice(oFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
                            oFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
                    //noinspection deprecation
                    outputBuffers = codec.getOutputBuffers();
                    track.play();
                } catch (IOException e) {
                    e.printStackTrace();
                } finally {
                    lock.unlock();
                }
            }
        } while (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
                || res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED);
    }

    codec.stop();
    track.stop();
    isDecoding = false;
    if (continuing && (sawInputEOS || sawOutputEOS)) {
        state = State.PLAYBACK_COMPLETED;
        L.d(TAG, "State changed to: " + state);
        Thread t = new Thread(new Runnable() {
            @Override
            public void run() {
                if (onCompletionListener != null) {
                    onCompletionListener.onCompletion();
                }
                stayAwake(false);
            }
        });
        t.setDaemon(true);
        t.start();
    }
    synchronized (decoderLock) {
        decoderLock.notifyAll();
    }
}