public void stop() throws Exception { // Free the RGB image if (buffer != null) { av_free(buffer); buffer = null; } if (pFrameRGB != null) { av_free(pFrameRGB); pFrameRGB = null; } // Free the YUV frame if (pFrame != null) { av_free(pFrame); pFrame = null; } // Close the codec if (pCodecCtx != null) { avcodec_close(pCodecCtx); pCodecCtx = null; } // Close the video file if (pFormatCtx != null && !pFormatCtx.isNull()) { av_close_input_file(pFormatCtx); pFormatCtx = null; } if (return_image != null) { return_image.release(); return_image = null; } }
public void trigger() throws Exception {
    /**
     * Flushes up to triggerFlushSize pending packets from the stream so that a
     * following grab() starts from fresh data. Stops early (silently) if the
     * stream runs out of packets.
     *
     * @throws Exception if start() has not been called yet.
     */
    if (pFormatCtx == null || pFormatCtx.isNull()) {
        throw new Exception("Could not trigger: No AVFormatContext. (Has start() been called?)");
    }
    int remaining = triggerFlushSize;
    while (remaining-- > 0) {
        if (av_read_frame(pFormatCtx, packet) < 0) {
            return; // end of stream: nothing left to flush
        }
        // Packet was filled by av_read_frame; discard it immediately.
        av_free_packet(packet);
    }
}
/**
 * Reads and decodes packets until one complete video frame is available,
 * then returns it as an IplImage whose timestamp field holds the frame's
 * presentation time in milliseconds (derived from the stream time base).
 *
 * Returns null at end of stream; throws if start() was never called.
 * NOTE(review): the returned IplImage is a reused instance (return_image) —
 * its contents are overwritten by the next grab(); callers needing to keep
 * a frame presumably must copy it. TODO confirm against callers.
 */
public IplImage grab() throws Exception {
    if (pFormatCtx == null || pFormatCtx.isNull()) {
        throw new Exception("Could not grab: No AVFormatContext. (Has start() been called?)");
    }
    boolean done = false;
    long pts = 0;
    while (!done) {
        if (av_read_frame(pFormatCtx, packet) < 0) {
            // throw new Exception("Could not read frame");
            return null; // end of file?
        }
        // Is this a packet from the video stream?
        if (packet.stream_index() == videoStream) {
            // Decode video frame; frameFinished[0] != 0 once a full frame is out.
            int len = avcodec_decode_video2(pCodecCtx, pFrame, frameFinished, packet);
            // pFrame.opaque() is read as a 64-bit timestamp slot — presumably
            // populated by a custom get_buffer callback elsewhere; verify.
            LongPointer opaque = new LongPointer(pFrame.opaque());
            // Timestamp preference: packet DTS, then the opaque PTS, else 0.
            if (packet.dts() != AV_NOPTS_VALUE) {
                pts = packet.dts();
            } else if (!opaque.isNull() && opaque.get() != AV_NOPTS_VALUE) {
                pts = opaque.get();
            } else {
                pts = 0;
            }
            // Rescale from stream time_base units to milliseconds.
            AVRational time_base = pStream.time_base();
            pts = 1000 * pts * time_base.num() / time_base.den();
            // Did we get a video frame?
            if (len > 0 && frameFinished[0] != 0) {
                switch (colorMode) {
                    case BGR:
                    case GRAY:
                        // Deinterlace Picture (in place) before conversion.
                        if (deinterlace) {
                            avpicture_deinterlace(pFrame, pFrame, pCodecCtx.pix_fmt(),
                                    pCodecCtx.width(), pCodecCtx.height());
                        }
                        // Convert the image from its native format to RGB into
                        // the separately allocated buffer/pFrameRGB pair.
                        sws_scale(img_convert_ctx, new PointerPointer(pFrame),
                                pFrame.linesize(), 0, pCodecCtx.height(),
                                new PointerPointer(pFrameRGB), pFrameRGB.linesize());
                        return_image.imageData(buffer);
                        return_image.widthStep(pFrameRGB.linesize(0));
                        break;
                    case RAW:
                        assert (pCodecCtx.width() == return_image.width() &&
                                pCodecCtx.height() == return_image.height());
                        // NOTE(review): imageData aliases pFrame's buffer here,
                        // and av_free_packet below may release memory the frame
                        // still references on some codecs — confirm lifetime.
                        return_image.imageData(pFrame.data(0));
                        return_image.widthStep(pFrame.linesize(0));
                        break;
                    default:
                        assert (false);
                }
                return_image.imageSize(return_image.height() * return_image.widthStep());
                done = true;
            }
        }
        // Free the packet that was allocated by av_read_frame
        av_free_packet(packet);
    }
    return_image.timestamp = pts;
    return return_image;
}