/** * Method to set byte array to the MediaCodec encoder * * @param buffer * @param length length of byte array, zero means EOS. * @param presentationTimeUs */ protected void encode(byte[] buffer, int length, long presentationTimeUs) { if (!mIsCapturing) return; int ix = 0, sz; final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers(); while (mIsCapturing && ix < length) { final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC); if (inputBufferIndex >= 0) { final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex]; inputBuffer.clear(); sz = inputBuffer.remaining(); sz = (ix + sz < length) ? sz : length - ix; if (sz > 0 && (buffer != null)) { inputBuffer.put(buffer, ix, sz); } ix += sz; // if (DEBUG) Log.v(TAG, "encode:queueInputBuffer"); if (length <= 0) { // send EOS mIsEOS = true; if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM"); mMediaCodec.queueInputBuffer( inputBufferIndex, 0, 0, presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM); break; } else { mMediaCodec.queueInputBuffer(inputBufferIndex, 0, sz, presentationTimeUs, 0); } } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) { // wait for MediaCodec encoder is ready to encode // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC) // will wait for maximum TIMEOUT_USEC(10msec) on each call } } }
private final void handleInputVideo() { long presentationTimeUs = mVideoMediaExtractor.getSampleTime(); if (presentationTimeUs < previousVideoPresentationTimeUs) { presentationTimeUs += previousVideoPresentationTimeUs - presentationTimeUs; // + EPS; } previousVideoPresentationTimeUs = presentationTimeUs; final boolean b = internal_process_input( mVideoMediaCodec, mVideoMediaExtractor, mVideoInputBuffers, presentationTimeUs, false); if (!b) { if (DEBUG) Log.i(TAG, "video track input reached EOS"); while (mIsRunning) { final int inputBufIndex = mVideoMediaCodec.dequeueInputBuffer(TIMEOUT_USEC); if (inputBufIndex >= 0) { mVideoMediaCodec.queueInputBuffer( inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM); if (DEBUG) Log.v(TAG, "sent input EOS:" + mVideoMediaCodec); break; } } synchronized (mSync) { mVideoInputDone = true; mSync.notifyAll(); } } }
/** * Write a media sample to the decoder. * * <p>A "sample" here refers to a single atomic access unit in the media stream. The definition of * "access unit" is dependent on the type of encoding used, but it typically refers to a single * frame of video or a few seconds of audio. {@link android.media.MediaExtractor} extracts data * from a stream one sample at a time. * * @param extractor Instance of {@link android.media.MediaExtractor} wrapping the media. * @param presentationTimeUs The time, relative to the beginning of the media stream, at which * this buffer should be rendered. * @param flags Flags to pass to the decoder. See {@link MediaCodec#queueInputBuffer(int, int, * int, long, int)} * @throws MediaCodec.CryptoException */ public boolean writeSample( final MediaExtractor extractor, final boolean isSecure, final long presentationTimeUs, int flags) { boolean result = false; boolean isEos = false; if (!mAvailableInputBuffers.isEmpty()) { int index = mAvailableInputBuffers.remove(); ByteBuffer buffer = mInputBuffers[index]; // reads the sample from the file using extractor into the buffer int size = extractor.readSampleData(buffer, 0); if (size <= 0) { flags |= MediaCodec.BUFFER_FLAG_END_OF_STREAM; } // Submit the buffer to the codec for decoding. The presentationTimeUs // indicates the position (play time) for the current sample. if (!isSecure) { mDecoder.queueInputBuffer(index, 0, size, presentationTimeUs, flags); } else { extractor.getSampleCryptoInfo(cryptoInfo); mDecoder.queueSecureInputBuffer(index, 0, cryptoInfo, presentationTimeUs, flags); } result = true; } return result; }
/**
 * Queues {@code size} bytes already written into the indexed input buffer for decoding.
 * Must be called on the MediaCodec thread.
 *
 * @return true on success, false if the codec rejected the buffer.
 */
private boolean queueInputBuffer(int inputBufferIndex, int size, long timestampUs) {
    checkOnMediaCodecThread();
    try {
        final ByteBuffer target = inputBuffers[inputBufferIndex];
        target.position(0);
        target.limit(size);
        mediaCodec.queueInputBuffer(inputBufferIndex, 0, size, timestampUs, 0);
        return true;
    } catch (IllegalStateException e) {
        Logging.e(TAG, "decode failed", e);
        return false;
    }
}
/** * @param codec * @param extractor * @param inputBuffers * @param presentationTimeUs * @param isAudio */ protected boolean internal_process_input( final MediaCodec codec, final MediaExtractor extractor, final ByteBuffer[] inputBuffers, final long presentationTimeUs, final boolean isAudio) { // if (DEBUG) Log.v(TAG, "internal_process_input:presentationTimeUs=" + presentationTimeUs); boolean result = true; while (mIsRunning) { final int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_USEC); if (inputBufIndex == MediaCodec.INFO_TRY_AGAIN_LATER) break; if (inputBufIndex >= 0) { final int size = extractor.readSampleData(inputBuffers[inputBufIndex], 0); if (size > 0) { codec.queueInputBuffer(inputBufIndex, 0, size, presentationTimeUs, 0); } result = extractor.advance(); // return false if no data is available break; } } return result; }
/** * Write a media sample to the decoder. * * <p>A "sample" here refers to a single atomic access unit in the media stream. The definition of * "access unit" is dependent on the type of encoding used, but it typically refers to a single * frame of video or a few seconds of audio. {@link android.media.MediaExtractor} extracts data * from a stream one sample at a time. * * @param input A ByteBuffer containing the input data for one sample. The buffer must be set up * for reading, with its position set to the beginning of the sample data and its limit set to * the end of the sample data. * @param presentationTimeUs The time, relative to the beginning of the media stream, at which * this buffer should be rendered. * @param flags Flags to pass to the decoder. See {@link MediaCodec#queueInputBuffer(int, int, * int, long, int)} * @throws MediaCodec.CryptoException */ public boolean writeSample( final ByteBuffer input, final MediaCodec.CryptoInfo crypto, final long presentationTimeUs, final int flags) throws MediaCodec.CryptoException, WriteException { boolean result = false; int size = input.remaining(); // check if we have dequed input buffers available from the codec if (size > 0 && !mAvailableInputBuffers.isEmpty()) { int index = mAvailableInputBuffers.remove(); ByteBuffer buffer = mInputBuffers[index]; // we can't write our sample to a lesser capacity input buffer. if (size > buffer.capacity()) { throw new MediaCodecWrapper.WriteException( String.format( "Insufficient capacity in MediaCodec buffer: " + "tried to write %d, buffer capacity is %d.", input.remaining(), buffer.capacity())); } buffer.clear(); buffer.put(input); // Submit the buffer to the codec for decoding. The presentationTimeUs // indicates the position (play time) for the current sample. if (crypto == null) { mDecoder.queueInputBuffer(index, 0, size, presentationTimeUs, flags); } else { mDecoder.queueSecureInputBuffer(index, 0, crypto, presentationTimeUs, flags); } result = true; } return result; }
/**
 * Checks the video data.
 *
 * <p>Decodes the encoded chunks in {@code inputData}, renders each frame to {@code surface},
 * verifies the presentation timestamp of every rendered frame against
 * {@code computePresentationTime}, and pixel-checks each frame via {@code checkSurfaceFrame}.
 *
 * @param inputData encoded video chunks to feed the decoder
 * @param decoder a configured and started video decoder
 * @param surface output surface the decoder renders into
 * @return the number of bad frames
 */
private int checkVideoData(VideoChunks inputData, MediaCodec decoder, OutputSurface surface) {
  final int TIMEOUT_USEC = 1000;
  ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
  ByteBuffer[] decoderOutputBuffers = decoder.getOutputBuffers();
  MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  int inputChunk = 0;   // index of the next chunk to submit to the decoder
  int checkIndex = 0;   // index of the next rendered frame to verify
  int badFrames = 0;
  boolean outputDone = false;
  boolean inputDone = false;
  // Alternate between feeding input and draining output until output hits EOS.
  while (!outputDone) {
    if (VERBOSE) Log.d(TAG, "check loop");
    // Feed more data to the decoder.
    if (!inputDone) {
      int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
      if (inputBufIndex >= 0) {
        if (inputChunk == inputData.getNumChunks()) {
          // End of stream -- send empty frame with EOS flag set.
          decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
          inputDone = true;
          if (VERBOSE) Log.d(TAG, "sent input EOS");
        } else {
          // Copy a chunk of input to the decoder. The first chunk should have
          // the BUFFER_FLAG_CODEC_CONFIG flag set.
          ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
          inputBuf.clear();
          inputData.getChunkData(inputChunk, inputBuf);
          int flags = inputData.getChunkFlags(inputChunk);
          long time = inputData.getChunkTime(inputChunk);
          // The chunk size is whatever getChunkData advanced the buffer position to.
          decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(), time, flags);
          if (VERBOSE) {
            Log.d(
                TAG,
                "submitted frame "
                    + inputChunk
                    + " to dec, size="
                    + inputBuf.position()
                    + " flags="
                    + flags);
          }
          inputChunk++;
        }
      } else {
        if (VERBOSE) Log.d(TAG, "input buffer not available");
      }
    }
    if (!outputDone) {
      int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
      if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
        // no output available yet
        if (VERBOSE) Log.d(TAG, "no output from decoder available");
      } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        // The output buffer array became stale; refresh our reference.
        decoderOutputBuffers = decoder.getOutputBuffers();
        if (VERBOSE) Log.d(TAG, "decoder output buffers changed");
      } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat newFormat = decoder.getOutputFormat();
        if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
      } else if (decoderStatus < 0) {
        fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
      } else { // decoderStatus >= 0
        ByteBuffer decodedData = decoderOutputBuffers[decoderStatus];
        if (VERBOSE)
          Log.d(TAG, "surface decoder given buffer " + decoderStatus + " (size=" + info.size + ")");
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
          if (VERBOSE) Log.d(TAG, "output EOS");
          outputDone = true;
        }
        // A zero-size buffer (e.g. the EOS marker) carries no frame to render.
        boolean doRender = (info.size != 0);
        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
        // to SurfaceTexture to convert to a texture. The API doesn't guarantee
        // that the texture will be available before the call returns, so we
        // need to wait for the onFrameAvailable callback to fire.
        decoder.releaseOutputBuffer(decoderStatus, doRender);
        if (doRender) {
          if (VERBOSE) Log.d(TAG, "awaiting frame " + checkIndex);
          assertEquals(
              "Wrong time stamp", computePresentationTime(checkIndex), info.presentationTimeUs);
          surface.awaitNewImage();
          surface.drawImage();
          if (!checkSurfaceFrame(checkIndex++)) {
            badFrames++;
          }
        }
      }
    }
  }
  return badFrames;
}
/**
 * Edits a stream of video data.
 *
 * <p>Runs a decode → render-to-surface → re-encode pipeline: chunks from {@code inputData} are
 * decoded to {@code outputSurface}, redrawn into the encoder's {@code inputSurface}, and the
 * encoder's output is collected into {@code outputData}. The encoder is always drained before the
 * decoder output is pulled, to avoid stalling the codecs. Throws if the number of frames out does
 * not match the number of frames in.
 */
private void editVideoData(
    VideoChunks inputData,
    MediaCodec decoder,
    OutputSurface outputSurface,
    InputSurface inputSurface,
    MediaCodec encoder,
    VideoChunks outputData) {
  final int TIMEOUT_USEC = 10000;
  ByteBuffer[] decoderInputBuffers = decoder.getInputBuffers();
  ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
  MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  int inputChunk = 0;   // next chunk to submit to the decoder
  int outputCount = 0;  // number of encoded chunks collected so far
  boolean outputDone = false;
  boolean inputDone = false;
  boolean decoderDone = false;
  while (!outputDone) {
    if (VERBOSE) Log.d(TAG, "edit loop");
    // Feed more data to the decoder.
    if (!inputDone) {
      int inputBufIndex = decoder.dequeueInputBuffer(TIMEOUT_USEC);
      if (inputBufIndex >= 0) {
        if (inputChunk == inputData.getNumChunks()) {
          // End of stream -- send empty frame with EOS flag set.
          decoder.queueInputBuffer(inputBufIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
          inputDone = true;
          if (VERBOSE) Log.d(TAG, "sent input EOS (with zero-length frame)");
        } else {
          // Copy a chunk of input to the decoder. The first chunk should have
          // the BUFFER_FLAG_CODEC_CONFIG flag set.
          ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
          inputBuf.clear();
          inputData.getChunkData(inputChunk, inputBuf);
          int flags = inputData.getChunkFlags(inputChunk);
          long time = inputData.getChunkTime(inputChunk);
          decoder.queueInputBuffer(inputBufIndex, 0, inputBuf.position(), time, flags);
          if (VERBOSE) {
            Log.d(
                TAG,
                "submitted frame "
                    + inputChunk
                    + " to dec, size="
                    + inputBuf.position()
                    + " flags="
                    + flags);
          }
          inputChunk++;
        }
      } else {
        if (VERBOSE) Log.d(TAG, "input buffer not available");
      }
    }
    // Assume output is available. Loop until both assumptions are false.
    boolean decoderOutputAvailable = !decoderDone;
    boolean encoderOutputAvailable = true;
    while (decoderOutputAvailable || encoderOutputAvailable) {
      // Start by draining any pending output from the encoder. It's important to
      // do this before we try to stuff any more data in.
      int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
      if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
        // no output available yet
        if (VERBOSE) Log.d(TAG, "no output from encoder available");
        encoderOutputAvailable = false;
      } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        encoderOutputBuffers = encoder.getOutputBuffers();
        if (VERBOSE) Log.d(TAG, "encoder output buffers changed");
      } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat newFormat = encoder.getOutputFormat();
        if (VERBOSE) Log.d(TAG, "encoder output format changed: " + newFormat);
      } else if (encoderStatus < 0) {
        fail("unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
      } else { // encoderStatus >= 0
        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
        if (encodedData == null) {
          fail("encoderOutputBuffer " + encoderStatus + " was null");
        }
        // Write the data to the output "file".
        if (info.size != 0) {
          encodedData.position(info.offset);
          encodedData.limit(info.offset + info.size);
          outputData.addChunk(encodedData, info.flags, info.presentationTimeUs);
          outputCount++;
          if (VERBOSE) Log.d(TAG, "encoder output " + info.size + " bytes");
        }
        outputDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
        encoder.releaseOutputBuffer(encoderStatus, false);
      }
      if (encoderStatus != MediaCodec.INFO_TRY_AGAIN_LATER) {
        // Continue attempts to drain output.
        continue;
      }
      // Encoder is drained, check to see if we've got a new frame of output from
      // the decoder. (The output is going to a Surface, rather than a ByteBuffer,
      // but we still get information through BufferInfo.)
      if (!decoderDone) {
        int decoderStatus = decoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
        if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
          // no output available yet
          if (VERBOSE) Log.d(TAG, "no output from decoder available");
          decoderOutputAvailable = false;
        } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
          // decoderOutputBuffers = decoder.getOutputBuffers();
          if (VERBOSE) Log.d(TAG, "decoder output buffers changed (we don't care)");
        } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
          // expected before first buffer of data
          MediaFormat newFormat = decoder.getOutputFormat();
          if (VERBOSE) Log.d(TAG, "decoder output format changed: " + newFormat);
        } else if (decoderStatus < 0) {
          fail("unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
        } else { // decoderStatus >= 0
          if (VERBOSE)
            Log.d(
                TAG,
                "surface decoder given buffer " + decoderStatus + " (size=" + info.size + ")");
          // The ByteBuffers are null references, but we still get a nonzero
          // size for the decoded data.
          boolean doRender = (info.size != 0);
          // As soon as we call releaseOutputBuffer, the buffer will be forwarded
          // to SurfaceTexture to convert to a texture. The API doesn't
          // guarantee that the texture will be available before the call
          // returns, so we need to wait for the onFrameAvailable callback to
          // fire. If we don't wait, we risk rendering from the previous frame.
          decoder.releaseOutputBuffer(decoderStatus, doRender);
          if (doRender) {
            // This waits for the image and renders it after it arrives.
            if (VERBOSE) Log.d(TAG, "awaiting frame");
            outputSurface.awaitNewImage();
            outputSurface.drawImage();
            // Send it to the encoder.
            inputSurface.setPresentationTime(info.presentationTimeUs * 1000);
            if (VERBOSE) Log.d(TAG, "swapBuffers");
            inputSurface.swapBuffers();
          }
          if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            // forward decoder EOS to encoder
            if (VERBOSE) Log.d(TAG, "signaling input EOS");
            if (WORK_AROUND_BUGS) {
              // Bail early, possibly dropping a frame.
              return;
            } else {
              encoder.signalEndOfInputStream();
            }
          }
        }
      }
    }
  }
  if (inputChunk != outputCount) {
    throw new RuntimeException("frame lost: " + inputChunk + " in, " + outputCount + " out");
  }
}
@CalledByNative private static boolean decodeAudioFile( Context ctx, int nativeMediaCodecBridge, int inputFD, long dataSize) { if (dataSize < 0 || dataSize > 0x7fffffff) return false; MediaExtractor extractor = new MediaExtractor(); ParcelFileDescriptor encodedFD; encodedFD = ParcelFileDescriptor.adoptFd(inputFD); try { extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize); } catch (Exception e) { e.printStackTrace(); encodedFD.detachFd(); return false; } if (extractor.getTrackCount() <= 0) { encodedFD.detachFd(); return false; } MediaFormat format = extractor.getTrackFormat(0); // Number of channels specified in the file int inputChannelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT); // Number of channels the decoder will provide. (Not // necessarily the same as inputChannelCount. See // crbug.com/266006.) int outputChannelCount = inputChannelCount; int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE); String mime = format.getString(MediaFormat.KEY_MIME); long durationMicroseconds = 0; if (format.containsKey(MediaFormat.KEY_DURATION)) { try { durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION); } catch (Exception e) { Log.d(LOG_TAG, "Cannot get duration"); } } if (DEBUG) { Log.d( LOG_TAG, "Tracks: " + extractor.getTrackCount() + " Rate: " + sampleRate + " Channels: " + inputChannelCount + " Mime: " + mime + " Duration: " + durationMicroseconds + " microsec"); } nativeInitializeDestination( nativeMediaCodecBridge, inputChannelCount, sampleRate, durationMicroseconds); // Create decoder MediaCodec codec = MediaCodec.createDecoderByType(mime); codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */); codec.start(); ByteBuffer[] codecInputBuffers = codec.getInputBuffers(); ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers(); // A track must be selected and will be used to read samples. 
extractor.selectTrack(0); boolean sawInputEOS = false; boolean sawOutputEOS = false; // Keep processing until the output is done. while (!sawOutputEOS) { if (!sawInputEOS) { // Input side int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS); if (inputBufIndex >= 0) { ByteBuffer dstBuf = codecInputBuffers[inputBufIndex]; int sampleSize = extractor.readSampleData(dstBuf, 0); long presentationTimeMicroSec = 0; if (sampleSize < 0) { sawInputEOS = true; sampleSize = 0; } else { presentationTimeMicroSec = extractor.getSampleTime(); } codec.queueInputBuffer( inputBufIndex, 0, /* offset */ sampleSize, presentationTimeMicroSec, sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0); if (!sawInputEOS) { extractor.advance(); } } } // Output side MediaCodec.BufferInfo info = new BufferInfo(); final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS); if (outputBufIndex >= 0) { ByteBuffer buf = codecOutputBuffers[outputBufIndex]; if (info.size > 0) { nativeOnChunkDecoded( nativeMediaCodecBridge, buf, info.size, inputChannelCount, outputChannelCount); } buf.clear(); codec.releaseOutputBuffer(outputBufIndex, false /* render */); if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) { sawOutputEOS = true; } } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) { codecOutputBuffers = codec.getOutputBuffers(); } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) { MediaFormat newFormat = codec.getOutputFormat(); outputChannelCount = newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT); Log.d(LOG_TAG, "output format changed to " + newFormat); } } encodedFD.detachFd(); codec.stop(); codec.release(); codec = null; return true; }
/**
 * Decoder thread body: decodes the video stream frame by frame, renders each frame to the
 * output surface, and forwards every {@code mDecimation}-th frame into {@code mMatBuffer} for
 * OpenCV processing. Runs until the stream's EOS flag is seen or the thread is interrupted.
 */
@Override
public void run() {
  setup();
  // Wake whoever is waiting for setup() to finish before decoding starts.
  synchronized (setupSignal) {
    setupSignal.notify();
  }
  if (!valid) {
    return;
  }
  decoder.start();
  ByteBuffer[] inputBuffers = decoder.getInputBuffers();
  ByteBuffer[] outputBuffers = decoder.getOutputBuffers();
  MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
  boolean isEOS = false;
  long startMs = System.currentTimeMillis(); // NOTE(review): unused in this loop
  long timeoutUs = 10000;                    // NOTE(review): unused; literals below are used instead
  int iframe = 0; // count of rendered frames, used for decimation
  while (!Thread.interrupted()) {
    // Input side: feed one sample per iteration until the extractor runs out.
    if (!isEOS) {
      int inIndex = decoder.dequeueInputBuffer(10000);
      if (inIndex >= 0) {
        ByteBuffer buffer = inputBuffers[inIndex];
        int sampleSize = extractor.readSampleData(buffer, 0);
        if (sampleSize < 0) {
          // Extractor exhausted: queue an empty EOS buffer.
          if (LOCAL_LOGD) {
            Log.d("VideoDecoderForOpenCV", "InputBuffer BUFFER_FLAG_END_OF_STREAM");
          }
          decoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
          isEOS = true;
        } else {
          decoder.queueInputBuffer(inIndex, 0, sampleSize, extractor.getSampleTime(), 0);
          extractor.advance();
        }
      }
    }
    // Output side: drain one decoded buffer (or status code) per iteration.
    int outIndex = decoder.dequeueOutputBuffer(info, 10000);
    MediaFormat outFormat;
    switch (outIndex) {
      case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
        if (LOCAL_LOGD) {
          Log.d("VideoDecoderForOpenCV", "INFO_OUTPUT_BUFFERS_CHANGED");
        }
        outputBuffers = decoder.getOutputBuffers();
        break;
      case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        outFormat = decoder.getOutputFormat();
        if (LOCAL_LOGD) {
          Log.d("VideoDecoderForOpenCV", "New format " + outFormat);
        }
        break;
      case MediaCodec.INFO_TRY_AGAIN_LATER:
        if (LOCAL_LOGD) {
          Log.d("VideoDecoderForOpenCV", "dequeueOutputBuffer timed out!");
        }
        break;
      default:
        ByteBuffer buffer = outputBuffers[outIndex];
        boolean doRender = (info.size != 0);
        // As soon as we call releaseOutputBuffer, the buffer will be forwarded
        // to SurfaceTexture to convert to a texture. The API doesn't
        // guarantee that the texture will be available before the call
        // returns, so we need to wait for the onFrameAvailable callback to
        // fire. If we don't wait, we risk rendering from the previous frame.
        decoder.releaseOutputBuffer(outIndex, doRender);
        if (doRender) {
          surface.awaitNewImage();
          surface.drawImage();
          if (LOCAL_LOGD) {
            Log.d("VideoDecoderForOpenCV", "Finish drawing a frame!");
          }
          if ((iframe++ % mDecimation) == 0) {
            // Send the frame for processing
            mMatBuffer.put();
          }
        }
        break;
    }
    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
      if (LOCAL_LOGD) {
        Log.d("VideoDecoderForOpenCV", "OutputBuffer BUFFER_FLAG_END_OF_STREAM");
      }
      break;
    }
  }
  mMatBuffer.invalidate();
  decoder.stop();
  teardown();
  mThread = null;
}
/**
 * Audio playback thread body: decodes compressed audio with MediaCodec, pipes the PCM through
 * Sonic for time-stretching at {@code speed} (pitch held at 1), and writes the result to the
 * AudioTrack. Honors pause (waits on {@code decoderLock}), re-initializes the output device on
 * codec format changes, and fires the completion listener when either side reaches EOS.
 */
@Override
public void run() {
  isDecoding = true;
  codec.start();
  @SuppressWarnings("deprecation")
  ByteBuffer[] inputBuffers = codec.getInputBuffers();
  @SuppressWarnings("deprecation")
  ByteBuffer[] outputBuffers = codec.getOutputBuffers();
  boolean sawInputEOS = false;
  boolean sawOutputEOS = false;
  while (!sawInputEOS && !sawOutputEOS && continuing) {
    // Paused: block until another thread notifies decoderLock, then re-check state.
    if (state == State.PAUSED) {
      try {
        synchronized (decoderLock) {
          decoderLock.wait();
        }
      } catch (InterruptedException e) {
        // Purposely not doing anything here
      }
      continue;
    }
    // Pick up any speed change before processing the next chunk.
    // NOTE(review): sonic is null-checked here but used unconditionally in the
    // output loop below — confirm sonic can never be null once decoding starts.
    if (sonic != null) {
      sonic.setSpeed(speed);
      sonic.setPitch(1);
    }
    // Input side: feed one sample (or the EOS marker) to the codec.
    int inputBufIndex = codec.dequeueInputBuffer(200);
    if (inputBufIndex >= 0) {
      ByteBuffer dstBuf = inputBuffers[inputBufIndex];
      int sampleSize = extractor.readSampleData(dstBuf, 0);
      long presentationTimeUs = 0;
      if (sampleSize < 0) {
        // readSampleData returns a negative size at end of stream.
        sawInputEOS = true;
        sampleSize = 0;
      } else {
        presentationTimeUs = extractor.getSampleTime();
      }
      codec.queueInputBuffer(
          inputBufIndex,
          0,
          sampleSize,
          presentationTimeUs,
          sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
      // A pending seek flushes the codec after the sample was queued.
      if (flushCodec) {
        codec.flush();
        flushCodec = false;
      }
      if (!sawInputEOS) {
        extractor.advance();
      }
    }
    final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    byte[] modifiedSamples = new byte[info.size]; // grows on demand below
    int res;
    //noinspection deprecation
    do {
      res = codec.dequeueOutputBuffer(info, 200);
      if (res >= 0) {
        // Copy the decoded PCM chunk out of the codec buffer.
        final byte[] chunk = new byte[info.size];
        outputBuffers[res].get(chunk);
        outputBuffers[res].clear();
        // Feed Sonic; an empty chunk flushes its internal pipeline.
        if (chunk.length > 0) {
          sonic.writeBytesToStream(chunk, chunk.length);
        } else {
          sonic.flushStream();
        }
        // Drain whatever Sonic has produced into the audio device.
        int available = sonic.availableBytes();
        if (available > 0) {
          if (modifiedSamples.length < available) {
            modifiedSamples = new byte[available];
          }
          sonic.readBytesFromStream(modifiedSamples, available);
          track.write(modifiedSamples, 0, available);
        }
        codec.releaseOutputBuffer(res, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
          sawOutputEOS = true;
        }
      } else //noinspection deprecation
      if (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        //noinspection deprecation
        outputBuffers = codec.getOutputBuffers();
      } else if (res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // Output format changed: rebuild the AudioTrack with the codec's
        // reported sample rate and channel count.
        track.stop();
        lock.lock();
        try {
          track.release();
          final MediaFormat oFormat = codec.getOutputFormat();
          initDevice(
              oFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE),
              oFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT));
          //noinspection deprecation
          outputBuffers = codec.getOutputBuffers();
          track.play();
        } catch (IOException e) {
          e.printStackTrace();
        } finally {
          lock.unlock();
        }
      }
    } while (res == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
        || res == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED);
  }
  codec.stop();
  track.stop();
  isDecoding = false;
  // Playback finished (not cancelled): notify the completion listener off-thread.
  if (continuing && (sawInputEOS || sawOutputEOS)) {
    state = State.PLAYBACK_COMPLETED;
    L.d(TAG, "State changed to: " + state);
    Thread t =
        new Thread(
            new Runnable() {
              @Override
              public void run() {
                if (onCompletionListener != null) {
                  onCompletionListener.onCompletion();
                }
                stayAwake(false);
              }
            });
    t.setDaemon(true);
    t.start();
  }
  // Wake any thread blocked waiting for this decode loop to finish.
  synchronized (decoderLock) {
    decoderLock.notifyAll();
  }
}