// Example #1
  /**
   * Opens the configured input, finds the first video stream, initializes its decoder,
   * and allocates the frames, pixel buffer, and swscale conversion context appropriate
   * for the configured {@code colorMode}.
   *
   * @throws Exception if the input format is unknown, the file cannot be opened, stream
   *     info cannot be read, no video stream is present, the codec is unsupported or
   *     fails to open, frame allocation fails, the conversion context cannot be
   *     initialized, or the color mode is unsupported
   */
  public void start() throws Exception {
    // Open video file
    AVInputFormat f = null;
    if (format != null && format.length() > 0) {
      f = av_find_input_format(format);
      if (f == null) {
        throw new Exception("Could not find input format \"" + format + "\".");
      }
    }
    AVFormatParameters fp = null;
    if (frameRate > 0 || bpp > 0 || imageWidth > 0 || imageHeight > 0) {
      fp = new AVFormatParameters();
      if (frameRate > 0) {
        // Only set the time base when a frame rate was actually given; with
        // frameRate == 0 the expression 1 / frameRate would divide by zero
        // (or yield an infinite rational) even though fp is built because one
        // of the other parameters was set.
        fp.time_base(av_d2q(1 / frameRate, FFmpegFrameRecorder.DEFAULT_FRAME_RATE_BASE));
      }
      fp.sample_rate(bpp);
      fp.channels(colorMode == ColorMode.BGR ? 3 : 1);
      fp.width(imageWidth);
      fp.height(imageHeight);
    }
    if (av_open_input_file(pFormatCtx, filename, f, 0, fp) != 0) {
      throw new Exception("Could not open file \"" + filename + "\".");
    }

    // Retrieve stream information
    if (av_find_stream_info(pFormatCtx) < 0) {
      throw new Exception("Could not find stream information.");
    }

    // Dump information about file onto standard error
    dump_format(pFormatCtx, 0, filename, 0);

    // Find the first video stream
    videoStream = -1;
    int nb_streams = pFormatCtx.nb_streams();
    for (int i = 0; i < nb_streams; i++) {
      pStream = pFormatCtx.streams(i);
      // Get a pointer to the codec context for the video stream
      pCodecCtx = pStream.codec();
      if (pCodecCtx.codec_type() == CODEC_TYPE_VIDEO) {
        videoStream = i;
        break;
      }
    }
    if (videoStream == -1) {
      throw new Exception("Did not find a video stream.");
    }

    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtx.codec_id());
    if (pCodec == null) {
      throw new Exception("Unsupported codec or codec not found: " + pCodecCtx.codec_id() + ".");
    }

    // Open codec
    if (avcodec_open(pCodecCtx, pCodec) < 0) {
      throw new Exception("Could not open codec.");
    }

    // Allocate video frame (check allocation like pFrameRGB below; a null
    // here would otherwise surface later as an opaque NPE during grabbing)
    pFrame = avcodec_alloc_frame();
    if (pFrame == null) {
      throw new Exception("Could not allocate frame.");
    }

    // Allocate an AVFrame structure for the converted output image
    pFrameRGB = avcodec_alloc_frame();
    if (pFrameRGB == null) {
      throw new Exception("Could not allocate frame.");
    }

    // Output dimensions: user-requested size if set, otherwise the stream's native size
    int width = getImageWidth() > 0 ? getImageWidth() : pCodecCtx.width();
    int height = getImageHeight() > 0 ? getImageHeight() : pCodecCtx.height();

    switch (colorMode) {
      case BGR:
        // Determine required buffer size and allocate buffer
        numBytes = avpicture_get_size(PIX_FMT_BGR24, width, height);
        buffer = new BytePointer(av_malloc(numBytes));

        // Assign appropriate parts of buffer to image planes in pFrameRGB
        // Note that pFrameRGB is an AVFrame, but AVFrame is a superset
        // of AVPicture
        avpicture_fill(pFrameRGB, buffer, PIX_FMT_BGR24, width, height);

        // Convert the image into BGR format that OpenCV uses
        img_convert_ctx =
            sws_getContext(
                pCodecCtx.width(),
                pCodecCtx.height(),
                pCodecCtx.pix_fmt(),
                width,
                height,
                PIX_FMT_BGR24,
                SWS_BILINEAR,
                null,
                null,
                null);
        if (img_convert_ctx == null) {
          throw new Exception("Cannot initialize the conversion context.");
        }

        return_image = IplImage.createHeader(width, height, IPL_DEPTH_8U, 3);
        break;

      case GRAY:
        numBytes = avpicture_get_size(PIX_FMT_GRAY8, width, height);
        buffer = new BytePointer(av_malloc(numBytes));
        avpicture_fill(pFrameRGB, buffer, PIX_FMT_GRAY8, width, height);

        // Convert the image into GRAY format that OpenCV uses
        img_convert_ctx =
            sws_getContext(
                pCodecCtx.width(),
                pCodecCtx.height(),
                pCodecCtx.pix_fmt(),
                width,
                height,
                PIX_FMT_GRAY8,
                SWS_BILINEAR,
                null,
                null,
                null);
        if (img_convert_ctx == null) {
          throw new Exception("Cannot initialize the conversion context.");
        }

        return_image = IplImage.createHeader(width, height, IPL_DEPTH_8U, 1);
        break;

      case RAW:
        // No conversion: decoded frame data is handed over as-is at native size
        numBytes = 0;
        buffer = null;
        img_convert_ctx = null;
        return_image =
            IplImage.createHeader(pCodecCtx.width(), pCodecCtx.height(), IPL_DEPTH_8U, 1);
        break;

      default:
        // assert(false) is a no-op unless -ea is set, which would leave
        // return_image null and fail much later; fail fast instead.
        throw new Exception("Unsupported color mode: " + colorMode + ".");
    }
  }