/**
 * Transitions the player from STATE_PREPARED to STATE_PLAYING, applies any
 * pending seek request, and starts the video worker thread when a video track
 * has been selected.
 *
 * @throws RuntimeException if the player is not currently in STATE_PREPARED
 */
private final void handleStart() {
    if (DEBUG) Log.v(TAG, "handleStart:");
    // Only the PREPARED -> PLAYING transition is legal here.
    synchronized (mSync) {
        if (mState != STATE_PREPARED) throw new RuntimeException("invalid state:" + mState);
        mState = STATE_PLAYING;
    }
    // Honor a seek that was requested before playback actually started.
    if (mRequestTime > 0) {
        handleSeek(mRequestTime);
    }
    previousVideoPresentationTimeUs = -1;
    // Default to "done" so that a missing video track leaves the pipeline idle.
    mVideoInputDone = true;
    mVideoOutputDone = true;
    Thread worker = null;
    if (mVideoTrackIndex >= 0) {
        final MediaCodec codec = internal_start_video(mVideoMediaExtractor, mVideoTrackIndex);
        if (codec != null) {
            mVideoMediaCodec = codec;
            mVideoBufferInfo = new MediaCodec.BufferInfo();
            mVideoInputBuffers = codec.getInputBuffers();
            mVideoOutputBuffers = codec.getOutputBuffers();
        }
        mVideoInputDone = false;
        mVideoOutputDone = false;
        worker = new Thread(mVideoTask, "VideoTask");
    }
    if (worker != null) {
        worker.start();
    }
}
/**
 * Wraps an already-configured {@link MediaCodec}, starts it, and snapshots its
 * input/output buffer arrays and bookkeeping structures.
 *
 * @param codec a configured (but not yet started) codec instance; this wrapper
 *     takes ownership of starting it
 */
private MediaCodecWrapper(MediaCodec codec) {
    mDecoder = codec;
    codec.start();
    mInputBuffers = codec.getInputBuffers();
    mOutputBuffers = codec.getOutputBuffers();
    mOutputBufferInfo = new MediaCodec.BufferInfo[mOutputBuffers.length];
    // FIX: the initial capacities were swapped (the input deque was sized from
    // the output buffer count and vice versa). Capacity is only a growth hint
    // for ArrayDeque, but each deque should be sized for the buffers it tracks.
    mAvailableInputBuffers = new ArrayDeque<Integer>(mInputBuffers.length);
    mAvailableOutputBuffers = new ArrayDeque<Integer>(mOutputBuffers.length);
}
// Dequeue the next decoded output buffer and return its metadata.
// Returns null when no output buffer is available yet, or a
// DecoderOutputBufferInfo with index -1 when an error happened (unsupported
// color format, or an IllegalStateException thrown by the codec).
private DecoderOutputBufferInfo dequeueOutputBuffer(int dequeueTimeoutUs) {
  checkOnMediaCodecThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
    // Consume informational status codes first; the loop ends on a real
    // buffer index (>= 0) or INFO_TRY_AGAIN_LATER.
    while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
        || result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        // The previous output buffer array is invalid after this status.
        outputBuffers = mediaCodec.getOutputBuffers();
        Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
      } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat format = mediaCodec.getOutputFormat();
        Logging.d(TAG, "Decoder format changed: " + format.toString());
        width = format.getInteger(MediaFormat.KEY_WIDTH);
        height = format.getInteger(MediaFormat.KEY_HEIGHT);
        if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
          colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
          Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
          // Check if new color space is supported.
          boolean validColorFormat = false;
          for (int supportedColorFormat : supportedColorList) {
            if (colorFormat == supportedColorFormat) {
              validColorFormat = true;
              break;
            }
          }
          if (!validColorFormat) {
            Logging.e(TAG, "Non supported color format");
            return new DecoderOutputBufferInfo(-1, 0, 0, -1);
          }
        }
        if (format.containsKey("stride")) {
          stride = format.getInteger("stride");
        }
        if (format.containsKey("slice-height")) {
          sliceHeight = format.getInteger("slice-height");
        }
        Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
        // Some codecs report stride/slice-height smaller than the frame
        // dimensions; clamp so downstream buffer math stays valid.
        stride = Math.max(width, stride);
        sliceHeight = Math.max(height, sliceHeight);
      }
      result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
    }
    if (result >= 0) {
      return new DecoderOutputBufferInfo(result, info.offset, info.size, info.presentationTimeUs);
    }
    // INFO_TRY_AGAIN_LATER (or any other negative status): no output yet.
    return null;
  } catch (IllegalStateException e) {
    Logging.e(TAG, "dequeueOutputBuffer failed", e);
    return new DecoderOutputBufferInfo(-1, 0, 0, -1);
  }
}
/** Synchronize this object's state with the internal state of the wrapped MediaCodec. */ private void update() { // BEGIN_INCLUDE(update_codec_state) int index; // Get valid input buffers from the codec to fill later in the same order they were // made available by the codec. while ((index = mDecoder.dequeueInputBuffer(0)) != MediaCodec.INFO_TRY_AGAIN_LATER) { mAvailableInputBuffers.add(index); } // Likewise with output buffers. If the output buffers have changed, start using the // new set of output buffers. If the output format has changed, notify listeners. MediaCodec.BufferInfo info = new MediaCodec.BufferInfo(); while ((index = mDecoder.dequeueOutputBuffer(info, 0)) != MediaCodec.INFO_TRY_AGAIN_LATER) { switch (index) { case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED: mOutputBuffers = mDecoder.getOutputBuffers(); mOutputBufferInfo = new MediaCodec.BufferInfo[mOutputBuffers.length]; mAvailableOutputBuffers.clear(); break; case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED: if (mOutputFormatChangedListener != null) { mHandler.post( new Runnable() { @Override public void run() { mOutputFormatChangedListener.outputFormatChanged( MediaCodecWrapper.this, mDecoder.getOutputFormat()); } }); } break; default: // Making sure the index is valid before adding to output buffers. We've already // handled INFO_TRY_AGAIN_LATER, INFO_OUTPUT_FORMAT_CHANGED & // INFO_OUTPUT_BUFFERS_CHANGED i.e all the other possible return codes but // asserting index value anyways for future-proofing the code. if (index >= 0) { mOutputBufferInfo[index] = info; mAvailableOutputBuffers.add(index); } else { throw new IllegalStateException("Unknown status from dequeueOutputBuffer"); } break; } } // END_INCLUDE(update_codec_state) }
/**
 * Drains decoded frames from the video decoder, writing/rendering each one and
 * pacing playback against the video clock. Returns as soon as the decoder has
 * no output ready; loops until EOS or the player stops running.
 *
 * @param frameCallback notified for every rendered frame; when it returns
 *     false, this method applies playback-clock pacing itself
 */
private final void handleOutputVideo(final IFrameCallback frameCallback) {
    // if (DEBUG) Log.v(TAG, "handleDrainVideo:");
    while (mIsRunning && !mVideoOutputDone) {
        final int decoderStatus =
                mVideoMediaCodec.dequeueOutputBuffer(mVideoBufferInfo, TIMEOUT_USEC);
        if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No decoded frame ready yet; yield back to the caller.
            return;
        } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // The previous output buffer array is invalid; re-fetch it before indexing.
            mVideoOutputBuffers = mVideoMediaCodec.getOutputBuffers();
            if (DEBUG) Log.d(TAG, "INFO_OUTPUT_BUFFERS_CHANGED:");
        } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            final MediaFormat newFormat = mVideoMediaCodec.getOutputFormat();
            if (DEBUG) Log.d(TAG, "video decoder output format changed: " + newFormat);
        } else if (decoderStatus < 0) {
            throw new RuntimeException(
                    "unexpected result from video decoder.dequeueOutputBuffer: " + decoderStatus);
        } else { // decoderStatus >= 0
            boolean doRender = false;
            if (mVideoBufferInfo.size > 0) {
                // Render only when internal_write_video returned false — presumably it
                // returns true when it consumed the frame itself (TODO confirm its
                // contract). The (size != 0) re-check is redundant inside this branch
                // but kept as-is.
                doRender =
                        (mVideoBufferInfo.size != 0)
                                && !internal_write_video(
                                        mVideoOutputBuffers[decoderStatus],
                                        0,
                                        mVideoBufferInfo.size,
                                        mVideoBufferInfo.presentationTimeUs);
                if (doRender) {
                    // When the callback declines pacing (returns false), sync the frame
                    // to the playback clock ourselves.
                    if (!frameCallback.onFrameAvailable(mVideoBufferInfo.presentationTimeUs))
                        mVideoStartTime =
                                adjustPresentationTime(
                                        mVideoSync, mVideoStartTime, mVideoBufferInfo.presentationTimeUs);
                }
            }
            // Return the buffer to the codec, rendering it to the surface when doRender.
            mVideoMediaCodec.releaseOutputBuffer(decoderStatus, doRender);
            if ((mVideoBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (DEBUG) Log.d(TAG, "video:output EOS");
                synchronized (mSync) {
                    mVideoOutputDone = true;
                    mSync.notifyAll();
                }
            }
        }
    }
}
/**
 * Drains encoded frames from the encoder and writes them to the muxer until
 * {@code endEncode} is set. Starts the muxer when the encoder first reports
 * its output format.
 *
 * @throws IllegalStateException if the output format changes more than once
 * @throws RuntimeException if an output buffer is null or data arrives before
 *     the muxer has started
 */
private void recordVideo() {
    ByteBuffer[] encodeOutputBuffers = mEncoder.getOutputBuffers();
    while (!endEncode.get()) {
        int encoderIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        if (encoderIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Expected exactly once, before any encoded data.
            if (mediaMuxerStarted) {
                throw new IllegalStateException("output format already changed");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            videoTrackIndex = mediaMuxer.addTrack(newFormat);
            mediaMuxer.start();
            mediaMuxerStarted = true;
        } else if (encoderIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // FIX: this status (-3) previously fell into the generic "< 0" branch,
            // leaving a stale buffer array in use. Re-fetch the output buffers.
            encodeOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No output ready yet; back off briefly instead of spinning.
            try {
                Thread.sleep(10);
            } catch (InterruptedException e) {
                // FIX: restore the interrupt status so callers can observe it.
                Thread.currentThread().interrupt();
                e.printStackTrace();
            }
        } else if (encoderIndex < 0) {
            Log.w(TAG, "encoderIndex 非法" + encoderIndex);
        } else { // encoderIndex >= 0
            ByteBuffer encodeData = encodeOutputBuffers[encoderIndex];
            if (encodeData == null) {
                throw new RuntimeException("编码数据为空");
            }
            if (mBufferInfo.size != 0) {
                if (!mediaMuxerStarted) throw new RuntimeException("混合器未开启");
                // Adjust the ByteBuffer window to match BufferInfo before writing.
                encodeData.position(mBufferInfo.offset);
                encodeData.limit(mBufferInfo.offset + mBufferInfo.size);
                mediaMuxer.writeSampleData(videoTrackIndex, encodeData, mBufferInfo);
            }
            // FIX: release unconditionally. The original only released buffers with
            // size != 0, so zero-size buffers (e.g. an EOS-only buffer) were never
            // returned to the encoder and the codec eventually starved for buffers.
            mEncoder.releaseOutputBuffer(encoderIndex, false);
        }
    }
}
/**
 * Decodes the encoded video in {@code inputData} through {@code decoder},
 * rendering each frame to {@code surface} and verifying it.
 *
 * @param inputData source of encoded chunks; the first chunk should carry the
 *     BUFFER_FLAG_CODEC_CONFIG data
 * @param decoder a configured and started video decoder
 * @param surface output surface used to render and inspect each frame
 * @return the number of bad frames
 */
private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface) {
    final int TIMEOUT_USEC = 1000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int checkIndex = 0;
    int badFrames = 0;
    boolean outputDone = false;
    boolean inputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "check loop");
        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                if (inputChunk == inputData.getNumChunks()) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(
                            inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE) Log.d(TAG, "sent input EOS");
                } else {
                    // Copy a chunk of input to the decoder. The first chunk should have
                    // the BUFFER_FLAG_CODEC_CONFIG flag set.
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.clear();
                    inputData.getChunkData(inputChunk, inputBuf);
                    int flags = inputData.getChunkFlags(inputChunk);
                    long time = inputData.getChunkTime(inputChunk);
                    decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(), time, flags);
                    if (VERBOSE) {
                        Log.d(
                                TAG,
                                "submitted frame "
                                        + inputChunk
                                        + " to dec, size="
                                        + inputBuf.position()
                                        + " flags="
                                        + flags);
                    }
                    inputChunk++;
                }
            } else {
                if (VERBOSE) Log.d(TAG, "input buffer not available");
            }
        }
        // Drain one decoded frame (if available) and verify it.
        if (!outputDone) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                decoderOutputBuffers = decoder.getOutputBuffers();
                if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = decoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
            } else if (decoderStatus < 0) {
                fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
            } else { // decoderStatus >= 0
                // NOTE(review): decodedData is unused — frames go to the Surface, not
                // this ByteBuffer; only BufferInfo metadata is consumed here.
                ByteBuffer decodedData = decoderOutputBuffers[decoderStatus];
                if (VERBOSE)
                    Log.d(
                            TAG,
                            "surface decoder given buffer " + decoderStatus + " (size=" + info.size + ")");
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    if (VERBOSE) Log.d(TAG, "output EOS");
                    outputDone = true;
                }
                boolean doRender = (info.size != 0);
                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't guarantee
                // that the texture will be available before the call returns, so we
                // need to wait for the onFrameAvailable callback to fire.
                decoder.releaseOutputBuffer(decoderStatus, doRender);
                if (doRender) {
                    if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
                    // Each frame's timestamp must match its generated index exactly.
                    assertEquals(
                            "Wrong time stamp", computePresentationTime(checkIndex), info.presentationTimeUs);
                    surface.awaitNewImage();
                    surface.drawImage();
                    if (!checkSurfaceFrame(checkIndex++)) {
                        badFrames++;
                    }
                }
            }
        }
    }
    return badFrames;
}
/**
 * Edits a stream of video data: decoded frames are rendered through
 * {@code outputSurface}, drawn into the encoder's {@code inputSurface}, and
 * re-encoded into {@code outputData}.
 *
 * @param inputData source of encoded chunks; first chunk should carry codec config
 * @param decoder configured and started decoder feeding {@code outputSurface}
 * @param outputSurface surface the decoder renders into
 * @param inputSurface surface feeding the encoder
 * @param encoder configured and started encoder
 * @param outputData sink for the re-encoded chunks
 * @throws RuntimeException if the number of input and output frames differ
 */
private void editVideoData(
        VideoChunks inputData,
        MediaCodec decoder,
        OutputSurface outputSurface,
        InputSurface inputSurface,
        MediaCodec encoder,
        VideoChunks outputData) {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int inputChunk = 0;
    int outputCount = 0;
    boolean outputDone = false;
    boolean inputDone = false;
    boolean decoderDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "edit loop");
        // Feed more data to the decoder.
        if (!inputDone) {
            int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
            if (inputBufIndex >= 0) {
                if (inputChunk == inputData.getNumChunks()) {
                    // End of stream -- send empty frame with EOS flag set.
                    decoder.queueInputBuffer(
                            inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
                } else {
                    // Copy a chunk of input to the decoder. The first chunk should have
                    // the BUFFER_FLAG_CODEC_CONFIG flag set.
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.clear();
                    inputData.getChunkData(inputChunk, inputBuf);
                    int flags = inputData.getChunkFlags(inputChunk);
                    long time = inputData.getChunkTime(inputChunk);
                    decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(), time, flags);
                    if (VERBOSE) {
                        Log.d(
                                TAG,
                                "submitted frame "
                                        + inputChunk
                                        + " to dec, size="
                                        + inputBuf.position()
                                        + " flags="
                                        + flags);
                    }
                    inputChunk++;
                }
            } else {
                if (VERBOSE) Log.d(TAG, "input buffer not available");
            }
        }
        // Assume output is available. Loop until both assumptions are false.
        boolean decoderOutputAvailable = !decoderDone;
        boolean encoderOutputAvailable = true;
        while (decoderOutputAvailable || encoderOutputAvailable) {
            // Start by draining any pending output from the encoder. It's important to
            // do this before we try to stuff any more data in.
            int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from encoder available");
                encoderOutputAvailable = false;
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                encoderOutputBuffers = encoder.getOutputBuffers();
                if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat newFormat = encoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
            } else if (encoderStatus < 0) {
                fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else { // encoderStatus >= 0
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    fail("encoderOutputBuffer " + encoderStatus + " was null");
                }
                // Write the data to the output "file".
                if (info.size != 0) {
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);
                    outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
                    outputCount++;
                    if (VERBOSE) Log.d(TAG, "encoder output " + info.size + " bytes");
                }
                outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                encoder.releaseOutputBuffer(encoderStatus, false);
            }
            if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
                // Continue attempts to drain output.
                continue;
            }
            // Encoder is drained, check to see if we've got a new frame of output from
            // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
            // but we still get information through BufferInfo.)
            if (!decoderDone) {
                int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
                if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                    // no output available yet
                    if (VERBOSE) Log.d(TAG, "no output from decoder available");
                    decoderOutputAvailable = false;
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // decoderOutputBuffers = decoder.getOutputBuffers();
                    if (VERBOSE) Log.d(TAG, "decoder output buffers changed (we don't care)");
                } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                    // expected before first buffer of data
                    MediaFormat newFormat = decoder.getOutputFormat();
                    if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
                } else if (decoderStatus < 0) {
                    fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                } else { // decoderStatus >= 0
                    if (VERBOSE)
                        Log.d(
                                TAG,
                                "surface decoder given buffer "
                                        + decoderStatus
                                        + " (size="
                                        + info.size
                                        + ")");
                    // The ByteBuffers are null references, but we still get a nonzero
                    // size for the decoded data.
                    boolean doRender = (info.size != 0);
                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture. The API doesn't
                    // guarantee that the texture will be available before the call
                    // returns, so we need to wait for the onFrameAvailable callback to
                    // fire. If we don't wait, we risk rendering from the previous frame.
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        // This waits for the image and renders it after it arrives.
                        if (VERBOSE) Log.d(TAG, "awaiting frame");
                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        // Send it to the encoder.
                        inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
                        if (VERBOSE) Log.d(TAG, "swapBuffers");
                        inputSurface.swapBuffers();
                    }
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        // forward decoder EOS to encoder
                        if (VERBOSE) Log.d(TAG, "signaling input EOS");
                        if (WORK_AROUND_BUGS) {
                            // Bail early, possibly dropping a frame.
                            return;
                        } else {
                            encoder.signalEndOfInputStream();
                        }
                    }
                }
            }
        }
    }
    if (inputChunk != outputCount) {
        throw new RuntimeException("frame lost: " + inputChunk + " in, " + outputCount + " out");
    }
}
/**
 * Generates video frames, feeds them into the encoder, and writes the output to the VideoChunks
 * instance. Asserts that exactly NUM_FRAMES + 1 chunks are produced (one per
 * frame plus one codec-config chunk).
 *
 * @param encoder configured and started video encoder
 * @param inputSurface surface the generated frames are drawn into
 * @param output sink for the encoded chunks
 */
private void generateVideoData(MediaCodec encoder, InputSurface inputSurface, VideoChunks output) {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int generateIndex = 0;
    int outputCount = 0;
    // Loop until the output side is done.
    boolean inputDone = false;
    boolean outputDone = false;
    while (!outputDone) {
        if (VERBOSE) Log.d(TAG, "gen loop");
        // If we're not done submitting frames, generate a new one and submit it. The
        // eglSwapBuffers call will block if the input is full.
        if (!inputDone) {
            if (generateIndex == NUM_FRAMES) {
                // Send an empty frame with the end-of-stream flag set.
                if (VERBOSE) Log.d(TAG, "signaling input EOS");
                if (WORK_AROUND_BUGS) {
                    // Might drop a frame, but at least we won't crash mediaserver.
                    try {
                        Thread.sleep(500);
                    } catch (InterruptedException ie) {
                    }
                    outputDone = true;
                } else {
                    encoder.signalEndOfInputStream();
                }
                inputDone = true;
            } else {
                generateSurfaceFrame(generateIndex);
                inputSurface.setPresentationTime(computePresentationTime(generateIndex) * 1000);
                if (VERBOSE) Log.d(TAG, "inputSurface swapBuffers");
                inputSurface.swapBuffers();
            }
            generateIndex++;
        }
        // Check for output from the encoder. If there's no output yet, we either need to
        // provide more input, or we need to wait for the encoder to work its magic. We
        // can't actually tell which is the case, so if we can't get an output buffer right
        // away we loop around and see if it wants more input.
        //
        // If we do find output, drain it all before supplying more input.
        while (true) {
            int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                if (VERBOSE) Log.d(TAG, "no output from encoder available");
                break; // out of while
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = encoder.getOutputBuffers();
                if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // not expected for an encoder
                MediaFormat newFormat = encoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
            } else if (encoderStatus < 0) {
                fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else { // encoderStatus >= 0
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    fail("encoderOutputBuffer " + encoderStatus + " was null");
                }
                // Codec config flag must be set iff this is the first chunk of output. This
                // may not hold for all codecs, but it appears to be the case for video/avc.
                assertTrue((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0 || outputCount != 0);
                if (info.size != 0) {
                    // Adjust the ByteBuffer values to match BufferInfo.
                    encodedData.position(info.offset);
                    encodedData.limit(info.offset + info.size);
                    output.addChunk(encodedData, info.flags, info.presentationTimeUs);
                    outputCount++;
                }
                encoder.releaseOutputBuffer(encoderStatus, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    outputDone = true;
                    break; // out of while
                }
            }
        }
    }
    // One chunk per frame, plus one for the config data.
    assertEquals("Frame count", NUM_FRAMES + 1, outputCount);
}
// Initializes a hardware video decoder for VP8 or H.264.
// Pass null in |sharedContext| to configure the codec for ByteBuffer output;
// a non-null context enables Surface (texture) output via a shared EGL context.
// Returns false if the codec could not be created/started.
// Throws RuntimeException for unsupported codec types, a missing HW decoder,
// or if release() was not called after a previous init.
private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
  if (mediaCodecThread != null) {
    throw new RuntimeException("Forgot to release()?");
  }
  useSurface = (sharedContext != null);
  String mime = null;
  String[] supportedCodecPrefixes = null;
  if (type == VideoCodecType.VIDEO_CODEC_VP8) {
    mime = VP8_MIME_TYPE;
    supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
  } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
    mime = H264_MIME_TYPE;
    supportedCodecPrefixes = supportedH264HwCodecPrefixes;
  } else {
    throw new RuntimeException("Non supported codec " + type);
  }
  DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
  if (properties == null) {
    throw new RuntimeException("Cannot find HW decoder for " + type);
  }
  Logging.d(
      TAG,
      "Java initDecode: "
          + type
          + " : "
          + width
          + " x "
          + height
          + ". Color: 0x"
          + Integer.toHexString(properties.colorFormat)
          + ". Use Surface: "
          + useSurface);
  if (sharedContext != null) {
    Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
  }
  // All subsequent codec calls must happen on this thread.
  mediaCodecThread = Thread.currentThread();
  try {
    Surface decodeSurface = null;
    this.width = width;
    this.height = height;
    stride = width;
    sliceHeight = height;
    if (useSurface) {
      // Create shared EGL context.
      eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
      eglBase.createDummyPbufferSurface();
      eglBase.makeCurrent();
      // Create output surface backed by an external OES texture.
      textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
      Logging.d(TAG, "Video decoder TextureID = " + textureID);
      surfaceTexture = new SurfaceTexture(textureID);
      surface = new Surface(surfaceTexture);
      decodeSurface = surface;
    }
    MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
    if (!useSurface) {
      // ByteBuffer output needs an explicit color format.
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
    }
    Logging.d(TAG, "  Format: " + format);
    // NOTE(review): the decoder is created via the encoder class's
    // createByCodecName helper — presumably a shared factory; confirm.
    mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
    if (mediaCodec == null) {
      return false;
    }
    mediaCodec.configure(format, decodeSurface, null, 0);
    mediaCodec.start();
    colorFormat = properties.colorFormat;
    outputBuffers = mediaCodec.getOutputBuffers();
    inputBuffers = mediaCodec.getInputBuffers();
    Logging.d(
        TAG,
        "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
    return true;
  } catch (IllegalStateException e) {
    Logging.e(TAG, "initDecode failed", e);
    return false;
  }
}
/**
 * Do encoding by using MediaCodec encoder, then extracts all pending data from the encoder and
 * forwards it to the muxer.
 *
 * <p>If notifyEndOfStream is not set, this returns when there is no more data to output. If it is
 * set, we send EOS to the encoder, and then iterate until we see EOS on the output. Calling this
 * with notifyEndOfStream set should be done once, before stopping the muxer.
 *
 * <p>We're just using the muxer to get a .mp4 file and audio is not included here.
 *
 * @param notifyEndOfStream when true, signals EOS to the encoder and drains until
 *     the output EOS flag is observed
 */
private void doMediaCodecEncoding(boolean notifyEndOfStream) {
    if (VERBOSE) {
        Log.v(TAG, "doMediaCodecEncoding(" + notifyEndOfStream + ")");
    }
    if (notifyEndOfStream) {
        mEncoder.signalEndOfInputStream();
    }
    ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
    boolean notDone = true;
    while (notDone) {
        int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            if (!notifyEndOfStream) {
                /**
                 * Break out of the while loop because the encoder is not ready to output anything yet.
                 */
                notDone = false;
            } else {
                // Draining to EOS: keep spinning until the EOS buffer arrives.
                if (VERBOSE) {
                    Log.v(TAG, "no output available, spinning to await EOS");
                }
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // generic case for mediacodec, not likely occurs for encoder.
            encoderOutputBuffers = mEncoder.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            /** should happen before receiving buffers, and should only happen once */
            if (mMuxerStarted) {
                throw new IllegalStateException("format changed twice");
            }
            MediaFormat newFormat = mEncoder.getOutputFormat();
            if (VERBOSE) {
                Log.v(TAG, "encoder output format changed: " + newFormat);
            }
            mTrackIndex = mMuxer.addTrack(newFormat);
            mMuxer.start();
            mMuxerStarted = true;
        } else if (encoderStatus < 0) {
            Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
        } else {
            // Normal flow: get output encoded buffer, send to muxer.
            ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                /**
                 * The codec config data was pulled out and fed to the muxer when we got the
                 * INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
                 */
                if (VERBOSE) {
                    Log.v(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
                }
                mBufferInfo.size = 0;
            }
            if (mBufferInfo.size != 0) {
                if (!mMuxerStarted) {
                    throw new RuntimeException("muxer hasn't started");
                }
                /** It's usually necessary to adjust the ByteBuffer values to match BufferInfo. */
                encodedData.position(mBufferInfo.offset);
                encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
                mMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                if (VERBOSE) {
                    Log.v(TAG, "sent " + mBufferInfo.size + " bytes to muxer");
                }
            }
            mEncoder.releaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (!notifyEndOfStream) {
                    Log.w(TAG, "reached end of stream unexpectedly");
                } else {
                    if (VERBOSE) {
                        Log.v(TAG, "end of stream reached");
                    }
                }
                // Finish encoding.
                notDone = false;
            }
        }
    } // End of while(notDone)
}
@CalledByNative private static boolean decodeAudioFile( Context ctx, int nativeMediaCodecBridge, int inputFD, long dataSize) { if (dataSize < 0 || dataSize > 0x7fffffff) return false; MediaExtractor extractor = new MediaExtractor(); ParcelFileDescriptor encodedFD; encodedFD = ParcelFileDescriptor.adoptFd(inputFD); try { extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize); } catch (Exception e) { e.printStackTrace(); encodedFD.detachFd(); return false; } if (extractor.getTrackCount() <= 0) { encodedFD.detachFd(); return false; } MediaFormat format = extractor.getTrackFormat(0); // Number of channels specified in the file int inputChannelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); // Number of channels the decoder will provide. (Not // necessarily the same as inputChannelCount. See // crbug.com/266006.) int outputChannelCount = inputChannelCount; int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); String mime = format.getString(MediaFormat.KEY_MIME); long durationMicroseconds = 0; if (format.containsKey(MediaFormat.KEY_DURATION)) { try { durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION); } catch (Exception e) { Log.d(LOG_TAG, "Cannot get duration"); } } if (DEBUG) { Log.d( LOG_TAG, "Tracks: " + extractor.getTrackCount() + " Rate: " + sampleRate + " Channels: " + inputChannelCount + " Mime: " + mime + " Duration: " + durationMicroseconds + " microsec"); } nativeInitializeDestination( nativeMediaCodecBridge, inputChannelCount, sampleRate, durationMicroseconds); // Create decoder MediaCodec codec = MediaCodec.createDecoderByType(mime); codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */); codec.start(); ByteBuffer[] codecInputBuffers = codec.getInputBuffers(); ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers(); // A track must be selected and will be used to read samples. 
extractor.selectTrack(0); boolean sawInputEOS = false; boolean sawOutputEOS = false; // Keep processing until the output is done. while (!sawOutputEOS) { if (!sawInputEOS) { // Input side int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS); if (inputBufIndex >= 0) { ByteBuffer dstBuf = codecInputBuffers[inputBufIndex]; int sampleSize = extractor.readSampleData(dstBuf, 0); long presentationTimeMicroSec = 0; if (sampleSize < 0) { sawInputEOS = true; sampleSize = 0; } else { presentationTimeMicroSec = extractor.getSampleTime(); } codec.queueInputBuffer( inputBufIndex, 0, /* offset */ sampleSize, presentationTimeMicroSec, sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0); if (!sawInputEOS) { extractor.advance(); } } } // Output side MediaCodec.BufferInfo info = new BufferInfo(); final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS); if (outputBufIndex >= 0) { ByteBuffer buf = codecOutputBuffers[outputBufIndex]; if (info.size > 0) { nativeOnChunkDecoded( nativeMediaCodecBridge, buf, info.size, inputChannelCount, outputChannelCount); } buf.clear(); codec.releaseOutputBuffer(outputBufIndex, false /* render */); if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { sawOutputEOS = true; } } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { codecOutputBuffers = codec.getOutputBuffers(); } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { MediaFormat newFormat = codec.getOutputFormat(); outputChannelCount = newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); Log.d(LOG_TAG, "output format changed to " + newFormat); } } encodedFD.detachFd(); codec.stop(); codec.release(); codec = null; return true; }
/**
 * Drains encoded data from the codec and writes it to the muxer. Gives up
 * after five consecutive empty dequeues (~50 ms) unless EOS was signalled, in
 * which case it keeps draining until the EOS buffer arrives.
 */
protected void drain() {
    if (mMediaCodec == null) return;
    ByteBuffer[] encoderOutputBuffers = mMediaCodec.getOutputBuffers();
    int encoderStatus, count = 0;
    final MediaMuxerWrapper muxer = mWeakMuxer.get();
    if (muxer == null) {
        // throw new NullPointerException("muxer is unexpectedly null");
        Log.w(TAG, "muxer is unexpectedly null");
        return;
    }
    LOOP:
    while (mIsCapturing) {
        // get encoded data with maximum timeout duration of TIMEOUT_USEC(=10[msec])
        encoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // wait 5 counts (= TIMEOUT_USEC x 5 = 50 msec) until data/EOS come
            if (!mIsEOS) {
                if (++count > 5) break LOOP; // out of while
            }
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            if (DEBUG) Log.v(TAG, "INFO_OUTPUT_BUFFERS_CHANGED");
            // this should not come when encoding, but refresh the buffer array anyway
            encoderOutputBuffers = mMediaCodec.getOutputBuffers();
        } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            if (DEBUG) Log.v(TAG, "INFO_OUTPUT_FORMAT_CHANGED");
            // this status indicates the output format of the codec changed.
            // It should come only once, before actual encoded data, but it never
            // comes on Android 4.3 or less — in that case handle the
            // MediaCodec.BUFFER_FLAG_CODEC_CONFIG buffer instead.
            if (mMuxerStarted) { // a second occurrence is an error
                throw new RuntimeException("format changed twice");
            }
            // get output format from codec and pass it to the muxer.
            // getOutputFormat must only be called after INFO_OUTPUT_FORMAT_CHANGED,
            // otherwise it crashes.
            final MediaFormat format = mMediaCodec.getOutputFormat(); // API >= 16
            mTrackIndex = muxer.addTrack(format);
            mMuxerStarted = true;
            if (!muxer.start()) {
                // the muxer is not ready yet (other tracks pending); wait for it
                synchronized (muxer) {
                    while (!muxer.isStarted())
                        try {
                            muxer.wait(100);
                        } catch (InterruptedException e) {
                            break LOOP;
                        }
                }
            }
        } else if (encoderStatus < 0) {
            // unexpected status
            if (DEBUG)
                Log.w(TAG, "drain:unexpected result from encoder#dequeueOutputBuffer: " + encoderStatus);
        } else {
            final ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
            if (encodedData == null) {
                // this should never happen... may be a MediaCodec internal error
                throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
            }
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                // On Android 4.3 or less you would set the output format on the muxer
                // here (MediaCodec#getOutputFormat cannot be called yet because
                // INFO_OUTPUT_FORMAT_CHANGED has not arrived), expanding the format
                // from this buffer's data. This sample targets API >= 18
                // (>= Android 4.3), so just ignore the config buffer.
                if (DEBUG) Log.d(TAG, "drain:BUFFER_FLAG_CODEC_CONFIG");
                mBufferInfo.size = 0;
            }
            if (mBufferInfo.size != 0) {
                // encoded data is ready; clear the waiting counter
                count = 0;
                if (!mMuxerStarted) {
                    // the muxer not being ready here is a programming failure
                    throw new RuntimeException("drain:muxer hasn't started");
                }
                // write encoded data to the muxer (presentationTimeUs needs adjusting)
                mBufferInfo.presentationTimeUs = getPTSUs();
                muxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
                prevOutputPTSUs = mBufferInfo.presentationTimeUs;
            }
            // return the buffer to the encoder
            mMediaCodec.releaseOutputBuffer(encoderStatus, false);
            if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                // EOS arrived: stop capturing
                mMuxerStarted = mIsCapturing = false;
                break; // out of while
            }
        }
    }
}
/**
 * Decode thread body: pulls samples from {@code extractor}, decodes them with
 * {@code decoder}, renders each decoded frame to {@code surface}, and hands every
 * {@code mDecimation}-th frame to {@code mMatBuffer} for processing.
 *
 * Signals {@code setupSignal} once {@link #setup()} has run (whether or not it
 * succeeded), then bails out early if setup left {@code valid} false. The loop
 * ends on thread interrupt or on the decoder's END_OF_STREAM flag; teardown
 * invalidates the frame buffer, stops the decoder and clears {@code mThread}.
 */
@Override
public void run() {
    setup();
    // Wake whoever is blocked waiting for setup to complete (success or failure).
    synchronized (setupSignal) {
        setupSignal.notify();
    }
    if (!valid) {
        return;    // setup failed; nothing to decode
    }
    decoder.start();
    ByteBuffer[] inputBuffers = decoder.getInputBuffers();
    ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    boolean isEOS = false;
    // NOTE(review): startMs and timeoutUs are never read below — dead locals;
    // the dequeue calls use the literal 10000 (10 ms) instead of timeoutUs.
    long startMs = System.currentTimeMillis();
    long timeoutUs = 10000;
    int iframe = 0;    // decoded-frame counter, drives the decimation below
    while (!Thread.interrupted()) {
        // ---- feed the decoder's input side until the extractor is exhausted ----
        if (!isEOS) {
            int inIndex = decoder.dequeueInputBuffer(10000);
            if (inIndex >= 0) {
                ByteBuffer buffer = inputBuffers[inIndex];
                int sampleSize = extractor.readSampleData(buffer, 0);
                if (sampleSize < 0) {
                    // Extractor is out of samples: queue an empty EOS buffer
                    // and stop feeding input.
                    if (LOCAL_LOGD) {
                        Log.d("VideoDecoderForOpenCV",
                                "InputBuffer BUFFER_FLAG_END_OF_STREAM");
                    }
                    decoder.queueInputBuffer(inIndex, 0, 0, 0,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    isEOS = true;
                } else {
                    decoder.queueInputBuffer(inIndex, 0, sampleSize,
                            extractor.getSampleTime(), 0);
                    extractor.advance();
                }
            }
        }
        // ---- drain the decoder's output side ----
        int outIndex = decoder.dequeueOutputBuffer(info, 10000);
        MediaFormat outFormat;
        switch (outIndex) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                // Output buffer array was replaced; refresh the cached reference.
                if (LOCAL_LOGD) {
                    Log.d("VideoDecoderForOpenCV", "INFO_OUTPUT_BUFFERS_CHANGED");
                }
                outputBuffers = decoder.getOutputBuffers();
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                // Format change is informational here — nothing is reconfigured.
                outFormat = decoder.getOutputFormat();
                if (LOCAL_LOGD) {
                    Log.d("VideoDecoderForOpenCV", "New format " + outFormat);
                }
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                if (LOCAL_LOGD) {
                    Log.d("VideoDecoderForOpenCV", "dequeueOutputBuffer timed out!");
                }
                break;
            default:
                // outIndex >= 0: a decoded frame is available.
                ByteBuffer buffer = outputBuffers[outIndex];
                boolean doRender = (info.size != 0);
                // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                // to SurfaceTexture to convert to a texture. The API doesn't
                // guarantee that the texture will be available before the call
                // returns, so we need to wait for the onFrameAvailable callback to
                // fire. If we don't wait, we risk rendering from the previous frame.
                decoder.releaseOutputBuffer(outIndex, doRender);
                if (doRender) {
                    surface.awaitNewImage();    // block until the frame hits the texture
                    surface.drawImage();
                    if (LOCAL_LOGD) {
                        Log.d("VideoDecoderForOpenCV", "Finish drawing a frame!");
                    }
                    // Only every mDecimation-th frame is forwarded for processing.
                    if ((iframe++ % mDecimation) == 0) {
                        // Send the frame for processing
                        mMatBuffer.put();
                    }
                }
                break;
        }
        // EOS flag on the *output* side ends the decode loop.
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            if (LOCAL_LOGD) {
                Log.d("VideoDecoderForOpenCV",
                        "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
            }
            break;
        }
    }
    // Teardown: release consumers blocked on the frame buffer, stop the codec,
    // free resources, and mark this worker thread as finished.
    mMatBuffer.invalidate();
    decoder.stop();
    teardown();
    mThread = null;
}
/**
 * Audio playback thread: decodes samples from {@code extractor} with {@code codec},
 * pushes the PCM through {@code sonic} for speed adjustment, and writes the result
 * to the AudioTrack {@code track}.
 *
 * Honors pause (waits on {@code decoderLock} while {@code state == State.PAUSED}),
 * a pending seek flush ({@code flushCodec}), and an output-format change (re-creates
 * the audio device under {@code lock}). On normal completion it flips the state to
 * PLAYBACK_COMPLETED and fires {@code onCompletionListener} from a daemon thread;
 * it always ends by notifying waiters on {@code decoderLock}.
 */
@Override
public void run() {
    isDecoding = true;
    codec.start();
    @SuppressWarnings("deprecation")
    ByteBuffer[] inputBuffers = codec.getInputBuffers();
    @SuppressWarnings("deprecation")
    ByteBuffer[] outputBuffers = codec.getOutputBuffers();
    boolean sawInputEOS = false;
    boolean sawOutputEOS = false;
    while (!sawInputEOS && !sawOutputEOS && continuing) {
        // ---- paused: park on decoderLock until resumed ----
        if (state == State.PAUSED) {
            try {
                synchronized (decoderLock) {
                    decoderLock.wait();
                }
            } catch (InterruptedException e) {
                // Purposely not doing anything here
                // NOTE(review): interrupt status is swallowed, not re-asserted —
                // confirm the owner of this thread never relies on interruption.
            }
            continue;
        }
        // Keep Sonic in sync with the current playback speed; pitch stays natural.
        // NOTE(review): sonic is null-checked here but dereferenced unconditionally
        // in the output loop below — presumably it is non-null once decoding starts;
        // verify against the initialization path.
        if (sonic != null) {
            sonic.setSpeed(speed);
            sonic.setPitch(1);
        }
        // ---- feed one input buffer, if the codec has one free ----
        int inputBufIndex = codec.dequeueInputBuffer(200);
        if (inputBufIndex >= 0) {
            ByteBuffer dstBuf = inputBuffers[inputBufIndex];
            int sampleSize = extractor.readSampleData(dstBuf, 0);
            long presentationTimeUs = 0;
            if (sampleSize < 0) {
                // No more samples: queue an empty EOS buffer.
                sawInputEOS = true;
                sampleSize = 0;
            } else {
                presentationTimeUs = extractor.getSampleTime();
            }
            codec.queueInputBuffer(
                    inputBufIndex,
                    0,
                    sampleSize,
                    presentationTimeUs,
                    sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
            // A seek was requested: drop everything buffered in the codec.
            if (flushCodec) {
                codec.flush();
                flushCodec = false;
            }
            if (!sawInputEOS) {
                extractor.advance();
            }
        }
        final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        // Scratch buffer for Sonic's output; starts empty (info.size is 0 here)
        // and is grown on demand below.
        byte[] modifiedSamples = new byte[info.size];
        int res;
        //noinspection deprecation
        do {
            // ---- drain decoded PCM, re-pacing it through Sonic ----
            res = codec.dequeueOutputBuffer(info, 200);
            if (res >= 0) {
                final byte[] chunk = new byte[info.size];
                outputBuffers[res].get(chunk);
                outputBuffers[res].clear();
                if (chunk.length > 0) {
                    sonic.writeBytesToStream(chunk, chunk.length);
                } else {
                    // Empty buffer: flush whatever Sonic still holds.
                    sonic.flushStream();
                }
                int available = sonic.availableBytes();
                if (available > 0) {
                    // Grow the scratch buffer if Sonic produced more than it can hold.
                    if (modifiedSamples.length < available) {
                        modifiedSamples = new byte[available];
                    }
                    sonic.readBytesFromStream(modifiedSamples, available);
                    track.write(modifiedSamples, 0, available);
                }
                codec.releaseOutputBuffer(res, false);
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    sawOutputEOS = true;
                }
            } else //noinspection deprecation
            if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // Output buffer array replaced; refresh the cached reference.
                //noinspection deprecation
                outputBuffers = codec.getOutputBuffers();
            } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // Sample rate / channel count changed: rebuild the AudioTrack
                // under `lock` so no one writes to a released track.
                track.stop();
                lock.lock();
                try {
                    track.release();
                    final MediaFormat oFormat = codec.getOutputFormat();
                    initDevice(
                            oFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
                            oFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
                    //noinspection deprecation
                    outputBuffers = codec.getOutputBuffers();
                    track.play();
                } catch (IOException e) {
                    // NOTE(review): device re-init failure is only printed, then
                    // playback continues against a released track — verify intent.
                    e.printStackTrace();
                } finally {
                    lock.unlock();
                }
            }
            // Keep draining while the codec only reported buffer/format changes.
        } while (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
                || res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED);
    }
    codec.stop();
    track.stop();
    isDecoding = false;
    // Natural end of stream (not an external stop): flip to PLAYBACK_COMPLETED and
    // notify the completion listener from a separate daemon thread so the callback
    // cannot block or deadlock this decoder thread.
    if (continuing && (sawInputEOS || sawOutputEOS)) {
        state = State.PLAYBACK_COMPLETED;
        L.d(TAG, "State changed to: " + state);
        Thread t = new Thread(
                new Runnable() {
                    @Override
                    public void run() {
                        if (onCompletionListener != null) {
                            onCompletionListener.onCompletion();
                        }
                        stayAwake(false);
                    }
                });
        t.setDaemon(true);
        t.start();
    }
    // Wake anyone waiting for this decode run to finish (e.g. pause/stop callers).
    synchronized (decoderLock) {
        decoderLock.notifyAll();
    }
}