/**
 * Edits a video file, saving the contents to a new file. This involves decoding and re-encoding,
 * not to mention conversions between YUV and RGB, and so may be lossy.
 *
 * <p>If we recognize the decoded format we can do this in Java code using the ByteBuffer[]
 * output, but it's not practical to support all OEM formats. By using a SurfaceTexture for output
 * and a Surface for input, we can avoid issues with obscure formats and can use a fragment shader
 * to do transformations.
 */
private VideoChunks editVideoFile(VideoChunks inputData) {
  if (VERBOSE) Log.d(TAG, "editVideoFile " + mWidth + "x" + mHeight);
  VideoChunks outputData = new VideoChunks();
  MediaCodec decoder = null;
  MediaCodec encoder = null;
  InputSurface inputSurface = null;
  OutputSurface outputSurface = null;
  try {
    MediaFormat inputFormat = inputData.getMediaFormat();

    // Create an encoder format that matches the input format. (Might be able to just
    // re-use the format used to generate the video, since we want it to be the same.)
    MediaFormat outputFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
    outputFormat.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    outputFormat.setInteger(
        MediaFormat.KEY_BIT_RATE, inputFormat.getInteger(MediaFormat.KEY_BIT_RATE));
    outputFormat.setInteger(
        MediaFormat.KEY_FRAME_RATE, inputFormat.getInteger(MediaFormat.KEY_FRAME_RATE));
    outputFormat.setInteger(
        MediaFormat.KEY_I_FRAME_INTERVAL,
        inputFormat.getInteger(MediaFormat.KEY_I_FRAME_INTERVAL));
    outputData.setMediaFormat(outputFormat);

    encoder = MediaCodec.createEncoderByType(MIME_TYPE);
    encoder.configure(outputFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    inputSurface = new InputSurface(encoder.createInputSurface());
    inputSurface.makeCurrent();
    encoder.start();

    // OutputSurface uses the EGL context created by InputSurface.
    decoder = MediaCodec.createDecoderByType(MIME_TYPE);
    outputSurface = new OutputSurface();
    outputSurface.changeFragmentShader(FRAGMENT_SHADER);
    decoder.configure(inputFormat, outputSurface.getSurface(), null, 0);
    decoder.start();

    editVideoData(inputData, decoder, outputSurface, inputSurface, encoder, outputData);
  } finally {
    if (VERBOSE) Log.d(TAG, "shutting down encoder, decoder");
    if (outputSurface != null) {
      outputSurface.release();
    }
    if (inputSurface != null) {
      inputSurface.release();
    }
    if (encoder != null) {
      encoder.stop();
      encoder.release();
    }
    if (decoder != null) {
      decoder.stop();
      decoder.release();
    }
  }
  return outputData;
}
public int getVideoWidth() {
  // The width is derived from the track height and the display aspect ratio (DAR),
  // so anamorphic content reports its display width rather than its storage width.
  return mVideoFormat != null
      ? (int) (mVideoFormat.getInteger(MediaFormat.KEY_HEIGHT)
          * mVideoFormat.getFloat(MediaExtractor.MEDIA_FORMAT_EXTENSION_KEY_DAR))
      : 0;
}
@Override
protected void onOutputFormatChanged(MediaCodec codec, android.media.MediaFormat outputFormat) {
  boolean hasCrop =
      outputFormat.containsKey(KEY_CROP_RIGHT)
          && outputFormat.containsKey(KEY_CROP_LEFT)
          && outputFormat.containsKey(KEY_CROP_BOTTOM)
          && outputFormat.containsKey(KEY_CROP_TOP);
  currentWidth =
      hasCrop
          ? outputFormat.getInteger(KEY_CROP_RIGHT) - outputFormat.getInteger(KEY_CROP_LEFT) + 1
          : outputFormat.getInteger(android.media.MediaFormat.KEY_WIDTH);
  currentHeight =
      hasCrop
          ? outputFormat.getInteger(KEY_CROP_BOTTOM) - outputFormat.getInteger(KEY_CROP_TOP) + 1
          : outputFormat.getInteger(android.media.MediaFormat.KEY_HEIGHT);
  currentPixelWidthHeightRatio = pendingPixelWidthHeightRatio;
  if (Util.SDK_INT >= 21) {
    // On API level 21 and above the decoder applies the rotation when rendering to the surface.
    // Hence currentUnappliedRotation should always be 0. For 90 and 270 degree rotations, we
    // need to flip the width, height and pixel aspect ratio to reflect the rotation that was
    // applied.
    if (pendingRotationDegrees == 90 || pendingRotationDegrees == 270) {
      int rotatedHeight = currentWidth;
      currentWidth = currentHeight;
      currentHeight = rotatedHeight;
      currentPixelWidthHeightRatio = 1 / currentPixelWidthHeightRatio;
    }
  } else {
    // On API level 20 and below the decoder does not apply the rotation.
    currentUnappliedRotationDegrees = pendingRotationDegrees;
  }
  // Must be applied each time the output format changes.
  codec.setVideoScalingMode(videoScalingMode);
}
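// The crop arithmetic above is easy to misread: the crop keys hold inclusive
// pixel indices, hence the "+ 1". A worked example with hypothetical values for
// a 1080p stream whose coded frames are padded to macroblock alignment (1920x1088):
//   crop-left = 0, crop-right = 1919, crop-top = 0, crop-bottom = 1079
//   width  = 1919 - 0 + 1 = 1920
//   height = 1079 - 0 + 1 = 1080
// Without the crop keys, KEY_HEIGHT would report the padded 1088 rows instead.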
// Decompiled source; obfuscated identifiers (a, c, g, n, this.j) retained as-is.
public synchronized boolean a(String paramString) {
  a.a("playWAV start :" + paramString);

  if (!c(paramString)) {
    a.b("file not exist");
    return false;
  }

  String str1 = paramString.trim();
  String str2 = str1.substring(str1.lastIndexOf(".") + 1);
  str2 = str2.toLowerCase();
  if (!str2.equalsIgnoreCase("wav")) {
    a.b("file not wav file");
    g();
    return false;
  }

  MediaExtractor localMediaExtractor = null;
  try {
    localMediaExtractor = new MediaExtractor();
  } catch (RuntimeException localRuntimeException1) {
    localRuntimeException1.printStackTrace();
    a.b("MediaExtractor error");
    // Bail out here: continuing with a null extractor would throw a
    // NullPointerException at setDataSource() below.
    return false;
  }
  try {
    localMediaExtractor.setDataSource(paramString);
  } catch (IOException localIOException) {
    localIOException.printStackTrace();
    a.b("setDataSource error");
    return false;
  }

  MediaFormat localMediaFormat = null;
  int i1 = localMediaExtractor.getTrackCount();
  if (i1 > 0) {
    try {
      localMediaFormat = localMediaExtractor.getTrackFormat(0);
    } catch (RuntimeException localRuntimeException2) {
      localRuntimeException2.printStackTrace();
      a.b("getTrackFormat error");
      return false;
    }

    int i2 = localMediaFormat.getInteger("sample-rate");
    this.j = localMediaFormat.getLong("durationUs");

    if (i2 < n) {
      g();
      return false;
    }
    return PlayWav(paramString);
  }
  return false;
}
// Dequeues an output buffer and returns its metadata. Returns null if no output
// buffer is available within the timeout, or a DecoderOutputBufferInfo with
// index -1 if an error occurred or the color format is unsupported.
private DecoderOutputBufferInfo dequeueOutputBuffer(int dequeueTimeoutUs) {
  checkOnMediaCodecThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
    while (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED
        || result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        outputBuffers = mediaCodec.getOutputBuffers();
        Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
      } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        MediaFormat format = mediaCodec.getOutputFormat();
        Logging.d(TAG, "Decoder format changed: " + format.toString());
        width = format.getInteger(MediaFormat.KEY_WIDTH);
        height = format.getInteger(MediaFormat.KEY_HEIGHT);
        if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
          colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
          Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
          // Check if the new color space is supported.
          boolean validColorFormat = false;
          for (int supportedColorFormat : supportedColorList) {
            if (colorFormat == supportedColorFormat) {
              validColorFormat = true;
              break;
            }
          }
          if (!validColorFormat) {
            Logging.e(TAG, "Non supported color format");
            return new DecoderOutputBufferInfo(-1, 0, 0, -1);
          }
        }
        if (format.containsKey("stride")) {
          stride = format.getInteger("stride");
        }
        if (format.containsKey("slice-height")) {
          sliceHeight = format.getInteger("slice-height");
        }
        Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
        stride = Math.max(width, stride);
        sliceHeight = Math.max(height, sliceHeight);
      }
      result = mediaCodec.dequeueOutputBuffer(info, dequeueTimeoutUs);
    }
    if (result >= 0) {
      return new DecoderOutputBufferInfo(result, info.offset, info.size, info.presentationTimeUs);
    }
    return null;
  } catch (IllegalStateException e) {
    Logging.e(TAG, "dequeueOutputBuffer failed", e);
    return new DecoderOutputBufferInfo(-1, 0, 0, -1);
  }
}
/** Returns true iff all audio and video tracks are supported. */
public static boolean hasCodecsForMedia(MediaExtractor ex) {
  for (int i = 0; i < ex.getTrackCount(); ++i) {
    MediaFormat format = ex.getTrackFormat(i);
    // Only check audio and video codecs.
    String mime = format.getString(MediaFormat.KEY_MIME).toLowerCase();
    if (!mime.startsWith("audio/") && !mime.startsWith("video/")) {
      continue;
    }
    if (!canDecode(format)) {
      return false;
    }
  }
  return true;
}
/** Returns true iff any track starting with mimePrefix is supported. */
public static boolean hasCodecForMediaAndDomain(MediaExtractor ex, String mimePrefix) {
  mimePrefix = mimePrefix.toLowerCase();
  for (int i = 0; i < ex.getTrackCount(); ++i) {
    MediaFormat format = ex.getTrackFormat(i);
    String mime = format.getString(MediaFormat.KEY_MIME);
    if (mime.toLowerCase().startsWith(mimePrefix)) {
      if (canDecode(format)) {
        return true;
      }
      Log.i(TAG, "no decoder for " + format);
    }
  }
  return false;
}
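// The two helpers above delegate to a canDecode(MediaFormat) method that is not
// shown in this section. A minimal sketch of such a check on API 21+, using
// MediaCodecList.findDecoderForFormat (the original implementation may differ):
import android.media.MediaCodecList;
import android.media.MediaFormat;

class CodecSupportSketch {
  /** Returns true if the platform advertises a decoder matching the given format. */
  static boolean canDecode(MediaFormat format) {
    // Note: on API 21-22, findDecoderForFormat rejects formats containing
    // KEY_FRAME_RATE; callers on those releases should clear that key first.
    MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
    return list.findDecoderForFormat(format) != null;
  }
}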
/**
 * Creates, configures, and starts a video decoder for the given track.
 *
 * @param media_extractor the extractor that owns the track
 * @param trackIndex index of the video track, or a negative value to skip
 * @return the started MediaCodec, or null if trackIndex is negative
 */
protected MediaCodec internal_start_video(
    final MediaExtractor media_extractor, final int trackIndex) throws IOException {
  if (DEBUG) Log.v(TAG, "internal_start_video:");
  MediaCodec codec = null;
  if (trackIndex >= 0) {
    final MediaFormat format = media_extractor.getTrackFormat(trackIndex);
    final String mime = format.getString(MediaFormat.KEY_MIME);
    // createDecoderByType declares IOException, hence the throws clause above.
    codec = MediaCodec.createDecoderByType(mime);
    codec.configure(format, mOutputSurface, null, 0);
    codec.start();
    if (DEBUG) Log.v(TAG, "internal_start_video:codec started");
  }
  return codec;
}
/**
 * Searches for the first track whose MIME type starts with the given prefix.
 *
 * @param extractor the MediaExtractor to search
 * @param mimeType "video/" or "audio/"
 * @return track index, -1 if not found
 */
protected static final int selectTrack(final MediaExtractor extractor, final String mimeType) {
  final int numTracks = extractor.getTrackCount();
  MediaFormat format;
  String mime;
  for (int i = 0; i < numTracks; i++) {
    format = extractor.getTrackFormat(i);
    mime = format.getString(MediaFormat.KEY_MIME);
    if (mime.startsWith(mimeType)) {
      if (DEBUG) {
        Log.d(TAG_STATIC, "Extractor selected track " + i + " (" + mime + "): " + format);
      }
      return i;
    }
  }
  return -1;
}
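// A typical call site for the selectTrack helper above, shown as a sketch. The
// wrapper class, path handling, and error handling are illustrative additions,
// not part of the original class; selectTrack refers to the helper above.
import android.media.MediaExtractor;
import java.io.IOException;

class SelectTrackUsageSketch {
  static MediaExtractor openVideoTrack(String path) throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(path);
    int trackIndex = selectTrack(extractor, "video/");
    if (trackIndex < 0) {
      extractor.release();
      throw new IOException("No video track in " + path);
    }
    extractor.selectTrack(trackIndex);
    return extractor;
  }
}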
/**
 * Constructs the {@link MediaCodecWrapper} wrapper object around the video codec. The codec is
 * created using the encapsulated information in the {@link MediaFormat} object.
 *
 * @param trackFormat The format of the media object to be decoded.
 * @param surface Surface to render the decoded frames.
 * @return a wrapper around the configured codec, or null if the track is not video or the codec
 *     could not be created and configured.
 */
public static MediaCodecWrapper fromVideoFormat(final MediaFormat trackFormat, Surface surface) {
  MediaCodecWrapper result = null;
  MediaCodec videoCodec = null;
  // BEGIN_INCLUDE(create_codec)
  final String mimeType = trackFormat.getString(MediaFormat.KEY_MIME);

  // Check to see if this is actually a video mime type. If it is, then create
  // a codec that can decode this mime type.
  if (mimeType.contains("video/")) {
    try {
      videoCodec = MediaCodec.createDecoderByType(mimeType);
      videoCodec.configure(trackFormat, surface, null, 0);
    } catch (Exception e) {
      // Creation or configuration failed; discard any partially configured codec
      // so we don't hand back a wrapper around an unusable instance.
      if (videoCodec != null) {
        videoCodec.release();
        videoCodec = null;
      }
    }
  }

  // If codec creation was successful, then create a wrapper object around the
  // newly created codec.
  if (videoCodec != null) {
    result = new MediaCodecWrapper(videoCodec);
  }
  // END_INCLUDE(create_codec)
  return result;
}
public static void validateAudioOutputFormat(MediaFormat format) {
  String mime = format.getString(MediaFormat.KEY_MIME);
  if (!MediaFormatExtraConstants.MIMETYPE_AUDIO_AAC.equals(mime)) {
    throw new InvalidOutputFormatException(
        "Audio codecs other than AAC are not supported, actual mime type: " + mime);
  }
}
@SuppressLint("InlinedApi") private void maybeSetMaxInputSize(android.media.MediaFormat format, boolean codecIsAdaptive) { if (format.containsKey(android.media.MediaFormat.KEY_MAX_INPUT_SIZE)) { // Already set. The source of the format may know better, so do nothing. return; } int maxHeight = format.getInteger(android.media.MediaFormat.KEY_HEIGHT); if (codecIsAdaptive && format.containsKey(android.media.MediaFormat.KEY_MAX_HEIGHT)) { maxHeight = Math.max(maxHeight, format.getInteger(android.media.MediaFormat.KEY_MAX_HEIGHT)); } int maxWidth = format.getInteger(android.media.MediaFormat.KEY_WIDTH); if (codecIsAdaptive && format.containsKey(android.media.MediaFormat.KEY_MAX_WIDTH)) { maxWidth = Math.max(maxHeight, format.getInteger(android.media.MediaFormat.KEY_MAX_WIDTH)); } int maxPixels; int minCompressionRatio; switch (format.getString(android.media.MediaFormat.KEY_MIME)) { case MimeTypes.VIDEO_H263: case MimeTypes.VIDEO_MP4V: maxPixels = maxWidth * maxHeight; minCompressionRatio = 2; break; case MimeTypes.VIDEO_H264: if ("BRAVIA 4K 2015".equals(Util.MODEL)) { // The Sony BRAVIA 4k TV has input buffers that are too small for the calculated 4k video // maximum input size, so use the default value. return; } // Round up width/height to an integer number of macroblocks. maxPixels = ((maxWidth + 15) / 16) * ((maxHeight + 15) / 16) * 16 * 16; minCompressionRatio = 2; break; case MimeTypes.VIDEO_VP8: // VPX does not specify a ratio so use the values from the platform's SoftVPX.cpp. maxPixels = maxWidth * maxHeight; minCompressionRatio = 2; break; case MimeTypes.VIDEO_H265: case MimeTypes.VIDEO_VP9: maxPixels = maxWidth * maxHeight; minCompressionRatio = 4; break; default: // Leave the default max input size. return; } // Estimate the maximum input size assuming three channel 4:2:0 subsampled input frames. int maxInputSize = (maxPixels * 3) / (2 * minCompressionRatio); format.setInteger(android.media.MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize); }
private void initStream() throws IOException, IllegalArgumentException {
  L.v(TAG, "initStream called in state=" + state);
  lock.lock();
  try {
    extractor = new MediaExtractor();
    if (path != null) {
      extractor.setDataSource(path);
    } else {
      error("initStream");
      throw new IOException("No data source path");
    }
    int trackNum = 0;
    final MediaFormat oFormat = extractor.getTrackFormat(trackNum);
    if (!oFormat.containsKey(MediaFormat.KEY_SAMPLE_RATE)) {
      error("initStream");
      throw new IOException("No KEY_SAMPLE_RATE");
    }
    int sampleRate = oFormat.getInteger(MediaFormat.KEY_SAMPLE_RATE);
    if (!oFormat.containsKey(MediaFormat.KEY_CHANNEL_COUNT)) {
      error("initStream");
      throw new IOException("No KEY_CHANNEL_COUNT");
    }
    int channelCount = oFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
    if (!oFormat.containsKey(MediaFormat.KEY_MIME)) {
      error("initStream");
      throw new IOException("No KEY_MIME");
    }
    final String mime = oFormat.getString(MediaFormat.KEY_MIME);
    if (!oFormat.containsKey(MediaFormat.KEY_DURATION)) {
      error("initStream");
      throw new IOException("No KEY_DURATION");
    }
    duration = oFormat.getLong(MediaFormat.KEY_DURATION);
    L.v(TAG, "Sample rate: " + sampleRate);
    L.v(TAG, "Mime type: " + mime);
    initDevice(sampleRate, channelCount);
    extractor.selectTrack(trackNum);
    codec = MediaCodec.createDecoderByType(mime);
    codec.configure(oFormat, null, null, 0);
  } finally {
    lock.unlock();
  }
}
void setup() {
  int width = 0, height = 0;

  extractor = new MediaExtractor();

  try {
    extractor.setDataSource(mVideoFile.getPath());
  } catch (IOException e) {
    return;
  }

  for (int i = 0; i < extractor.getTrackCount(); i++) {
    MediaFormat format = extractor.getTrackFormat(i);
    String mime = format.getString(MediaFormat.KEY_MIME);
    if (mime.startsWith("video/")) {
      // Only video tracks carry KEY_WIDTH/KEY_HEIGHT; reading them from an
      // audio track's format would throw.
      width = format.getInteger(MediaFormat.KEY_WIDTH);
      height = format.getInteger(MediaFormat.KEY_HEIGHT);
      extractor.selectTrack(i);
      try {
        decoder = MediaCodec.createDecoderByType(mime);
      } catch (IOException e) {
        continue;
      }
      // Decode to surface
      // decoder.configure(format, surface, null, 0);

      // Decode to offscreen surface
      surface = new CtsMediaOutputSurface(width, height);
      mMatBuffer = new MatBuffer(width, height);

      decoder.configure(format, surface.getSurface(), null, 0);
      break;
    }
  }

  if (decoder == null) {
    Log.e("VideoDecoderForOpenCV", "Can't find video info!");
    return;
  }
  valid = true;
}
@Override
protected void prepare() throws IOException {
  if (DEBUG) Log.v(TAG, "prepare:");
  mTrackIndex = -1;
  mMuxerStarted = mIsEOS = false;

  // Prepare MediaCodec for AAC encoding of audio data from the internal mic.
  final MediaCodecInfo audioCodecInfo = selectAudioCodec(MIME_TYPE);
  if (audioCodecInfo == null) {
    Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
    return;
  }
  if (DEBUG) Log.i(TAG, "selected codec: " + audioCodecInfo.getName());

  final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
  audioFormat.setInteger(
      MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
  audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
  audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
  audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
  // audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
  // audioFormat.setLong(MediaFormat.KEY_DURATION, (long)durationInMs);
  if (DEBUG) Log.i(TAG, "format: " + audioFormat);

  mMediaCodec = MediaCodec.createEncoderByType(MIME_TYPE);
  mMediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
  mMediaCodec.start();
  if (DEBUG) Log.i(TAG, "prepare finishing");
  if (mListener != null) {
    try {
      mListener.onPrepared(this);
    } catch (final Exception e) {
      Log.e(TAG, "prepare:", e);
    }
  }
}
/** Video encoding is done by a MediaCodec. But here we will use the buffer-to-surface method. */
@SuppressLint({"InlinedApi", "NewApi"})
protected void encodeWithMediaCodecMethod2() throws RuntimeException, IOException {
  Log.d(TAG, "Video encoded using the MediaCodec API with a surface");

  // Updates the parameters of the camera if needed
  createCamera();
  updateCamera();

  // Estimates the framerate of the camera
  measureFramerate();

  EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);

  mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
  MediaFormat mediaFormat =
      MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
  mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
  mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);
  mediaFormat.setInteger(
      MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
  mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
  mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
  Surface surface = mMediaCodec.createInputSurface();
  ((SurfaceView) mSurfaceView).addMediaCodecSurface(surface);
  mMediaCodec.start();

  mStreaming = true;
}
public PassThroughTrackTranscoder(
    MediaExtractor extractor,
    int trackIndex,
    QueuedMuxer muxer,
    QueuedMuxer.SampleType sampleType) {
  mExtractor = extractor;
  mTrackIndex = trackIndex;
  mMuxer = muxer;
  mSampleType = sampleType;

  mActualOutputFormat = mExtractor.getTrackFormat(mTrackIndex);
  mMuxer.setOutputFormat(mSampleType, mActualOutputFormat);
  mBufferSize = mActualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE);
  mBuffer = ByteBuffer.allocateDirect(mBufferSize).order(ByteOrder.nativeOrder());
}
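// getInteger(KEY_MAX_INPUT_SIZE) in the constructor above throws if the
// extractor did not populate that key, which some containers do not. A hedged
// variant of those two lines with an assumed 1 MiB fallback (the fallback
// constant is illustrative, not from the original class):
int bufferSize =
    mActualOutputFormat.containsKey(MediaFormat.KEY_MAX_INPUT_SIZE)
        ? mActualOutputFormat.getInteger(MediaFormat.KEY_MAX_INPUT_SIZE)
        : 1024 * 1024; // assumed fallback size
mBuffer = ByteBuffer.allocateDirect(bufferSize).order(ByteOrder.nativeOrder());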
/**
 * @param source_file path of the movie file to parse
 * @return first video track index, -1 if not found
 */
protected int internal_prepare_video(final String source_file) {
  int trackindex = -1;
  mVideoMediaExtractor = new MediaExtractor();
  try {
    mVideoMediaExtractor.setDataSource(source_file);
    trackindex = selectTrack(mVideoMediaExtractor, "video/");
    if (trackindex >= 0) {
      mVideoMediaExtractor.selectTrack(trackindex);
      final MediaFormat format = mVideoMediaExtractor.getTrackFormat(trackindex);
      mVideoWidth = format.getInteger(MediaFormat.KEY_WIDTH);
      mVideoHeight = format.getInteger(MediaFormat.KEY_HEIGHT);
      mDuration = format.getLong(MediaFormat.KEY_DURATION);
      if (DEBUG)
        Log.v(
            TAG,
            String.format(
                "format:size(%d,%d),duration=%d,bps=%d,framerate=%f,rotation=%d",
                mVideoWidth, mVideoHeight, mDuration, mBitrate, mFrameRate, mRotation));
    }
  } catch (final IOException e) {
    // setDataSource failed; leave trackindex at -1 so the caller treats this as no video.
    Log.w(TAG, "internal_prepare_video:", e);
  }
  return trackindex;
}
public static void validateVideoOutputFormat(MediaFormat format) {
  String mime = format.getString(MediaFormat.KEY_MIME);
  // Refer: http://developer.android.com/guide/appendix/media-formats.html#core
  // Refer: http://en.wikipedia.org/wiki/MPEG-4_Part_14#Data_streams
  if (!MediaFormatExtraConstants.MIMETYPE_VIDEO_AVC.equals(mime)) {
    throw new InvalidOutputFormatException(
        "Video codecs other than AVC are not supported, actual mime type: " + mime);
  }
  ByteBuffer spsBuffer = AvcCsdUtils.getSpsBuffer(format);
  SeqParameterSet sps = H264Utils.readSPS(spsBuffer);
  if (sps.profile_idc != PROFILE_IDC_BASELINE) {
    throw new InvalidOutputFormatException(
        "Non-baseline AVC video profile is not supported by Android OS, actual profile_idc: "
            + sps.profile_idc);
  }
}
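// One plausible call site for the two validators, shown as a sketch: validating
// the formats the encoders actually produce (valid after the encoder signals
// INFO_OUTPUT_FORMAT_CHANGED). The encoder variables are illustrative, not from
// the original library:
MediaFormat actualVideoFormat = videoEncoder.getOutputFormat();
MediaFormat actualAudioFormat = audioEncoder.getOutputFormat();
validateVideoOutputFormat(actualVideoFormat); // throws on non-AVC or non-baseline profile
validateAudioOutputFormat(actualAudioFormat); // throws on anything other than AAC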
public void setDataSource(MediaSource source) throws IOException {
  mVideoExtractor = source.getVideoExtractor();
  mAudioExtractor = source.getAudioExtractor();

  mVideoTrackIndex = -1;
  mAudioTrackIndex = -1;

  for (int i = 0; i < mVideoExtractor.getTrackCount(); ++i) {
    MediaFormat format = mVideoExtractor.getTrackFormat(i);
    Log.d(TAG, format.toString());
    String mime = format.getString(MediaFormat.KEY_MIME);
    if (mVideoTrackIndex < 0 && mime.startsWith("video/")) {
      mVideoExtractor.selectTrack(i);
      mVideoTrackIndex = i;
      mVideoFormat = format;
      mVideoMinPTS = mVideoExtractor.getSampleTime();
    } else if (mAudioExtractor == null && mAudioTrackIndex < 0 && mime.startsWith("audio/")) {
      // No dedicated audio extractor, so the audio track is read from the video extractor.
      mVideoExtractor.selectTrack(i);
      mAudioTrackIndex = i;
      mAudioFormat = format;
      mAudioMinPTS = mVideoExtractor.getSampleTime();
    }
  }

  if (mAudioExtractor != null) {
    for (int i = 0; i < mAudioExtractor.getTrackCount(); ++i) {
      MediaFormat format = mAudioExtractor.getTrackFormat(i);
      Log.d(TAG, format.toString());
      String mime = format.getString(MediaFormat.KEY_MIME);
      if (mAudioTrackIndex < 0 && mime.startsWith("audio/")) {
        mAudioExtractor.selectTrack(i);
        mAudioTrackIndex = i;
        mAudioFormat = format;
        mAudioMinPTS = mAudioExtractor.getSampleTime();
      }
    }
  }

  if (mVideoFormat == null) {
    throw new IOException("no video track found");
  } else {
    if (mAudioFormat == null) {
      Log.i(TAG, "no audio track found");
    }
    if (mPlaybackThread == null) {
      if (mSurface == null) {
        Log.i(TAG, "no video output surface specified");
      }
      mPlaybackThread = new PlaybackThread();
      mPlaybackThread.start();
    }
  }
}
/**
 * Generates a test video file, saving it as VideoChunks. We generate frames with GL to avoid
 * having to deal with multiple YUV formats.
 *
 * @return true on success, false on "soft" failure
 */
private boolean generateVideoFile(VideoChunks output) {
  if (VERBOSE) Log.d(TAG, "generateVideoFile " + mWidth + "x" + mHeight);
  MediaCodec encoder = null;
  InputSurface inputSurface = null;
  try {
    MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
    if (codecInfo == null) {
      // Don't fail CTS if they don't have an AVC codec (not here, anyway).
      Log.e(TAG, "Unable to find an appropriate codec for " + MIME_TYPE);
      return false;
    }
    if (VERBOSE) Log.d(TAG, "found codec: " + codecInfo.getName());

    // We avoid the device-specific limitations on width and height by using values that
    // are multiples of 16, which all tested devices seem to be able to handle.
    MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);

    // Set some properties. Failing to specify some of these can cause the MediaCodec
    // configure() call to throw an unhelpful exception.
    format.setInteger(
        MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
    if (VERBOSE) Log.d(TAG, "format: " + format);
    output.setMediaFormat(format);

    // Create a MediaCodec for the desired codec, then configure it as an encoder with
    // our desired properties.
    encoder = MediaCodec.createByCodecName(codecInfo.getName());
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    inputSurface = new InputSurface(encoder.createInputSurface());
    inputSurface.makeCurrent();
    encoder.start();

    generateVideoData(encoder, inputSurface, output);
  } finally {
    if (encoder != null) {
      if (VERBOSE) Log.d(TAG, "releasing encoder");
      encoder.stop();
      encoder.release();
      if (VERBOSE) Log.d(TAG, "released encoder");
    }
    if (inputSurface != null) {
      inputSurface.release();
    }
  }
  return true;
}
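// selectCodec(MIME_TYPE) is referenced above but not shown in this section. The
// classic CTS-style lookup iterates the codec list for an encoder supporting the
// MIME type; a sketch (the original may differ, and the pre-API-21 MediaCodecList
// statics used here are deprecated on newer releases):
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;

class SelectCodecSketch {
  /** Returns the first encoder that supports the given MIME type, or null. */
  static MediaCodecInfo selectCodec(String mimeType) {
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
      MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
      if (!codecInfo.isEncoder()) {
        continue;
      }
      for (String type : codecInfo.getSupportedTypes()) {
        if (type.equalsIgnoreCase(mimeType)) {
          return codecInfo;
        }
      }
    }
    return null;
  }
}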
/**
 * Create the display surface out of the encoder. The data to encode will be fed from this
 * Surface itself.
 *
 * @return the encoder's input Surface
 * @throws IOException if the encoder cannot be created
 */
@TargetApi(19)
private Surface createDisplaySurface() throws IOException {
  MediaFormat mMediaFormat =
      MediaFormat.createVideoFormat(CodecUtils.MIME_TYPE, CodecUtils.WIDTH, CodecUtils.HEIGHT);
  mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, (int) (1024 * 1024 * 0.5));
  mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
  mMediaFormat.setInteger(
      MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
  mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);

  Log.i(TAG, "Starting encoder");
  encoder = MediaCodec.createEncoderByType(CodecUtils.MIME_TYPE);
  encoder.configure(mMediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);

  Surface surface = encoder.createInputSurface();
  return surface;
}
@Override
public int addTrack(MediaFormat trackFormat) {
  // With FFmpeg, we want to write the encoder's BUFFER_FLAG_CODEC_CONFIG
  // buffer directly via writeSampleData, whereas with MediaMuxer this call
  // handles that.
  // TODO: Ensure addTrack isn't called more times than it should be...
  // TODO: Make an FFmpegWrapper API that sets mVideo/AudioTrackIndex instead of hard-code
  int trackIndex;
  if (trackFormat.getString(MediaFormat.KEY_MIME).compareTo("video/avc") == 0) {
    trackIndex = mVideoTrackIndex;
  } else {
    trackIndex = mAudioTrackIndex;
  }
  if (formatRequiresBuffering()) {
    mHandler.sendMessage(mHandler.obtainMessage(MSG_ADD_TRACK, trackFormat));
    synchronized (mMuxerInputQueue) {
      while (mMuxerInputQueue.size() < trackIndex + 1) {
        mMuxerInputQueue.add(new ArrayDeque<ByteBuffer>());
      }
    }
  } else {
    handleAddTrack(trackFormat);
  }
  return trackIndex;
}
public void prepareEncode() {
  // MediaFormat describing the video to be encoded.
  MediaFormat mediaFormat = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
  mediaFormat.setInteger(
      MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
  mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, BITRATE); // bit rate
  mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE); // frame rate
  mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL); // key frame interval
  try {
    mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    mEncoder.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mSurface = mEncoder.createInputSurface();
    mEncoder.start();
    mediaMuxer = new MediaMuxer(filePath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
  } catch (IOException e) {
    e.printStackTrace();
  }
}
/**
 * Configures encoder and muxer state, and prepares the input Surface. Initializes mEncoder,
 * mMuxer, mRecordingSurface, mBufferInfo, mTrackIndex, and mMuxerStarted.
 */
private void configureMediaCodecEncoder() {
  mBufferInfo = new MediaCodec.BufferInfo();
  MediaFormat format =
      MediaFormat.createVideoFormat(MIME_TYPE, mStreamSize.getWidth(), mStreamSize.getHeight());

  // Set encoding properties. Failing to specify some of these can cause the MediaCodec
  // configure() call to throw an exception.
  format.setInteger(
      MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
  format.setInteger(MediaFormat.KEY_BIT_RATE, mEncBitRate);
  format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
  format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
  Log.i(TAG, "configure video encoding format: " + format);

  // Create/configure a MediaCodec encoder.
  mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
  mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
  mRecordingSurface = mEncoder.createInputSurface();
  mEncoder.start();

  String outputFileName = getOutputMediaFileName();
  if (outputFileName == null) {
    throw new IllegalStateException("Failed to get video output file");
  }

  // Create a MediaMuxer. We can't add the video track and start() the muxer until the encoder
  // starts and notifies us of the new media format.
  try {
    mMuxer = new MediaMuxer(outputFileName, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
  } catch (IOException ioe) {
    throw new IllegalStateException("MediaMuxer creation failed", ioe);
  }
  mMuxerStarted = false;
}
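// The comment above alludes to the standard muxer-start pattern: the track can
// only be added once the encoder signals INFO_OUTPUT_FORMAT_CHANGED, because the
// actual output format carries the csd-0/csd-1 buffers the muxer needs. A minimal
// sketch of that portion of a drain loop (TIMEOUT_USEC is assumed; the full loop
// also writes sample data and handles end of stream):
int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
  if (mMuxerStarted) {
    throw new IllegalStateException("format changed twice");
  }
  MediaFormat newFormat = mEncoder.getOutputFormat();
  mTrackIndex = mMuxer.addTrack(newFormat);
  mMuxer.start();
  mMuxerStarted = true;
}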
/** Configures encoder and muxer state, and prepares the input Surface. */
public VideoEncoderCore(int width, int height, int bitRate, Muxer muxer) throws IOException {
  mMuxer = muxer;
  mBufferInfo = new MediaCodec.BufferInfo();

  MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);

  // Set some properties. Failing to specify some of these can cause the MediaCodec
  // configure() call to throw an unhelpful exception.
  format.setInteger(
      MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
  format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
  format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
  format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
  if (VERBOSE) Log.d(TAG, "format: " + format);

  // Create a MediaCodec encoder, and configure it with our format. Get a Surface
  // we can use for input and wrap it with a class that handles the EGL work.
  mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
  mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
  mInputSurface = mEncoder.createInputSurface();
  mEncoder.start();

  mTrackIndex = -1;
}
public int getVideoHeight() {
  return mVideoFormat != null ? mVideoFormat.getInteger(MediaFormat.KEY_HEIGHT) : 0;
}
@CalledByNative
private static boolean decodeAudioFile(
    Context ctx, int nativeMediaCodecBridge, int inputFD, long dataSize) {
  if (dataSize < 0 || dataSize > 0x7fffffff) return false;

  MediaExtractor extractor = new MediaExtractor();

  ParcelFileDescriptor encodedFD;
  encodedFD = ParcelFileDescriptor.adoptFd(inputFD);
  try {
    extractor.setDataSource(encodedFD.getFileDescriptor(), 0, dataSize);
  } catch (Exception e) {
    e.printStackTrace();
    encodedFD.detachFd();
    return false;
  }

  if (extractor.getTrackCount() <= 0) {
    encodedFD.detachFd();
    return false;
  }

  MediaFormat format = extractor.getTrackFormat(0);

  // Number of channels specified in the file.
  int inputChannelCount = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);

  // Number of channels the decoder will provide. (Not necessarily the same as
  // inputChannelCount. See crbug.com/266006.)
  int outputChannelCount = inputChannelCount;

  int sampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
  String mime = format.getString(MediaFormat.KEY_MIME);

  long durationMicroseconds = 0;
  if (format.containsKey(MediaFormat.KEY_DURATION)) {
    try {
      durationMicroseconds = format.getLong(MediaFormat.KEY_DURATION);
    } catch (Exception e) {
      Log.d(LOG_TAG, "Cannot get duration");
    }
  }

  if (DEBUG) {
    Log.d(
        LOG_TAG,
        "Tracks: " + extractor.getTrackCount()
            + " Rate: " + sampleRate
            + " Channels: " + inputChannelCount
            + " Mime: " + mime
            + " Duration: " + durationMicroseconds + " microsec");
  }

  nativeInitializeDestination(
      nativeMediaCodecBridge, inputChannelCount, sampleRate, durationMicroseconds);

  // Create the decoder. createDecoderByType declares IOException from API 21 on,
  // so creation failures are handled here rather than propagated.
  MediaCodec codec;
  try {
    codec = MediaCodec.createDecoderByType(mime);
  } catch (Exception e) {
    encodedFD.detachFd();
    return false;
  }
  codec.configure(format, null /* surface */, null /* crypto */, 0 /* flags */);
  codec.start();

  ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
  ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();

  // A track must be selected and will be used to read samples.
  extractor.selectTrack(0);

  boolean sawInputEOS = false;
  boolean sawOutputEOS = false;

  // Keep processing until the output is done.
  while (!sawOutputEOS) {
    if (!sawInputEOS) {
      // Input side
      int inputBufIndex = codec.dequeueInputBuffer(TIMEOUT_MICROSECONDS);
      if (inputBufIndex >= 0) {
        ByteBuffer dstBuf = codecInputBuffers[inputBufIndex];
        int sampleSize = extractor.readSampleData(dstBuf, 0);
        long presentationTimeMicroSec = 0;
        if (sampleSize < 0) {
          sawInputEOS = true;
          sampleSize = 0;
        } else {
          presentationTimeMicroSec = extractor.getSampleTime();
        }
        codec.queueInputBuffer(
            inputBufIndex,
            0, /* offset */
            sampleSize,
            presentationTimeMicroSec,
            sawInputEOS ? MediaCodec.BUFFER_FLAG_END_OF_STREAM : 0);
        if (!sawInputEOS) {
          extractor.advance();
        }
      }
    }

    // Output side
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    final int outputBufIndex = codec.dequeueOutputBuffer(info, TIMEOUT_MICROSECONDS);

    if (outputBufIndex >= 0) {
      ByteBuffer buf = codecOutputBuffers[outputBufIndex];

      if (info.size > 0) {
        nativeOnChunkDecoded(
            nativeMediaCodecBridge, buf, info.size, inputChannelCount, outputChannelCount);
      }

      buf.clear();
      codec.releaseOutputBuffer(outputBufIndex, false /* render */);

      if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        sawOutputEOS = true;
      }
    } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      codecOutputBuffers = codec.getOutputBuffers();
    } else if (outputBufIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      MediaFormat newFormat = codec.getOutputFormat();
      outputChannelCount = newFormat.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
      Log.d(LOG_TAG, "output format changed to " + newFormat);
    }
  }

  encodedFD.detachFd();

  codec.stop();
  codec.release();
  codec = null;

  return true;
}
public int getDuration() {
  return mVideoFormat != null ? (int) (mVideoFormat.getLong(MediaFormat.KEY_DURATION) / 1000) : 0;
}
// Pass null in |sharedContext| to configure the codec for ByteBuffer output.
private boolean initDecode(VideoCodecType type, int width, int height, EGLContext sharedContext) {
  if (mediaCodecThread != null) {
    throw new RuntimeException("Forgot to release()?");
  }
  useSurface = (sharedContext != null);
  String mime = null;
  String[] supportedCodecPrefixes = null;
  if (type == VideoCodecType.VIDEO_CODEC_VP8) {
    mime = VP8_MIME_TYPE;
    supportedCodecPrefixes = supportedVp8HwCodecPrefixes;
  } else if (type == VideoCodecType.VIDEO_CODEC_H264) {
    mime = H264_MIME_TYPE;
    supportedCodecPrefixes = supportedH264HwCodecPrefixes;
  } else {
    throw new RuntimeException("Non supported codec " + type);
  }
  DecoderProperties properties = findDecoder(mime, supportedCodecPrefixes);
  if (properties == null) {
    throw new RuntimeException("Cannot find HW decoder for " + type);
  }
  Logging.d(
      TAG,
      "Java initDecode: " + type + " : " + width + " x " + height
          + ". Color: 0x" + Integer.toHexString(properties.colorFormat)
          + ". Use Surface: " + useSurface);
  if (sharedContext != null) {
    Logging.d(TAG, "Decoder shared EGL Context: " + sharedContext);
  }
  mediaCodecThread = Thread.currentThread();
  try {
    Surface decodeSurface = null;
    this.width = width;
    this.height = height;
    stride = width;
    sliceHeight = height;

    if (useSurface) {
      // Create shared EGL context.
      eglBase = new EglBase(sharedContext, EglBase.ConfigType.PIXEL_BUFFER);
      eglBase.createDummyPbufferSurface();
      eglBase.makeCurrent();

      // Create output surface.
      textureID = GlUtil.generateTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES);
      Logging.d(TAG, "Video decoder TextureID = " + textureID);
      surfaceTexture = new SurfaceTexture(textureID);
      surface = new Surface(surfaceTexture);
      decodeSurface = surface;
    }

    MediaFormat format = MediaFormat.createVideoFormat(mime, width, height);
    if (!useSurface) {
      format.setInteger(MediaFormat.KEY_COLOR_FORMAT, properties.colorFormat);
    }
    Logging.d(TAG, "  Format: " + format);
    mediaCodec = MediaCodecVideoEncoder.createByCodecName(properties.codecName);
    if (mediaCodec == null) {
      return false;
    }
    mediaCodec.configure(format, decodeSurface, null, 0);
    mediaCodec.start();
    colorFormat = properties.colorFormat;
    outputBuffers = mediaCodec.getOutputBuffers();
    inputBuffers = mediaCodec.getInputBuffers();
    Logging.d(
        TAG,
        "Input buffers: " + inputBuffers.length + ". Output buffers: " + outputBuffers.length);
    return true;
  } catch (IllegalStateException e) {
    Logging.e(TAG, "initDecode failed", e);
    return false;
  }
}