protected void prepareVideo(
      IVideoResampler[] iVideoResamplers,
      IVideoPicture[] inputIVideoPictures,
      IVideoPicture[] outputIVideoPictures,
      IStreamCoder inputIStreamCoder,
      IStreamCoder[] outputIStreamCoders,
      IContainer outputIContainer,
      IStream[] outputIStreams,
      ICodec.Type inputICodecType,
      String outputURL,
      int index)
      throws Exception {

    IStream outputIStream = outputIContainer.addNewStream(index);

    outputIStreams[index] = outputIStream;

    IStreamCoder outputIStreamCoder = outputIStream.getStreamCoder();

    outputIStreamCoders[index] = outputIStreamCoder;

    int bitRate = inputIStreamCoder.getBitRate();

    if (_log.isInfoEnabled()) {
      _log.info("Original video bitrate " + bitRate);
    }

    if (bitRate == 0) {
      bitRate =
          GetterUtil.getInteger(_videoBitRateMap.get(_outputVideoFormat), _VIDEO_BIT_RATE_DEFAULT);
    } else if (bitRate > _VIDEO_BIT_RATE_MAX) {
      bitRate = _VIDEO_BIT_RATE_MAX;
    }

    if (_log.isInfoEnabled()) {
      _log.info("Modified video bitrate " + bitRate);
    }

    outputIStreamCoder.setBitRate(bitRate);

    ICodec iCodec = ICodec.guessEncodingCodec(null, null, outputURL, null, inputICodecType);

    if (_outputVideoFormat.equals("mp4")) {
      iCodec = ICodec.findEncodingCodec(ICodec.ID.CODEC_ID_H264);
    }

    if (iCodec == null) {
      throw new RuntimeException(
          "Unable to determine " + inputICodecType + " encoder for " + outputURL);
    }

    outputIStreamCoder.setCodec(iCodec);

    IRational iRational = inputIStreamCoder.getFrameRate();

    if (_log.isInfoEnabled()) {
      _log.info(
          "Original frame rate " + iRational.getNumerator() + "/" + iRational.getDenominator());
    }

    if (_videoFrameRateMap.containsKey(_outputVideoFormat)) {
      iRational = _videoFrameRateMap.get(_outputVideoFormat);
    }

    if (_log.isInfoEnabled()) {
      _log.info(
          "Modified frame rate " + iRational.getNumerator() + "/" + iRational.getDenominator());
    }

    outputIStreamCoder.setFrameRate(iRational);

    if (inputIStreamCoder.getHeight() <= 0) {
      throw new RuntimeException("Unable to determine height for " + _inputURL);
    }

    outputIStreamCoder.setHeight(_height);

    outputIStreamCoder.setPixelType(Type.YUV420P);
    outputIStreamCoder.setTimeBase(
        IRational.make(iRational.getDenominator(), iRational.getNumerator()));

    if (inputIStreamCoder.getWidth() <= 0) {
      throw new RuntimeException("Unable to determine width for " + _inputURL);
    }

    outputIStreamCoder.setWidth(_width);

    iVideoResamplers[index] =
        createIVideoResampler(inputIStreamCoder, outputIStreamCoder, _height, _width);

    inputIVideoPictures[index] =
        IVideoPicture.make(
            inputIStreamCoder.getPixelType(),
            inputIStreamCoder.getWidth(),
            inputIStreamCoder.getHeight());
    outputIVideoPictures[index] =
        IVideoPicture.make(
            outputIStreamCoder.getPixelType(),
            outputIStreamCoder.getWidth(),
            outputIStreamCoder.getHeight());

    ICodec.ID iCodecID = iCodec.getID();

    if (iCodecID.equals(ICodec.ID.CODEC_ID_H264)) {
      Configuration.configure(_ffpresetProperties, outputIStreamCoder);
    }
  }
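
The method above (and its refactored counterpart in the next example) delegates to a createIVideoResampler helper that is not shown here. A minimal sketch of what such a helper might look like, assuming it returns null when the input already matches the output dimensions and pixel format:

  // Sketch of the unshown helper, under the assumption that a null return
  // means "no resampling needed"; the real implementation may differ.
  protected IVideoResampler createIVideoResampler(
      IStreamCoder inputIStreamCoder, IStreamCoder outputIStreamCoder, int height, int width)
      throws Exception {

    if ((inputIStreamCoder.getHeight() == height)
        && (inputIStreamCoder.getWidth() == width)
        && (inputIStreamCoder.getPixelType() == outputIStreamCoder.getPixelType())) {

      return null;
    }

    IVideoResampler iVideoResampler =
        IVideoResampler.make(
            width, height, outputIStreamCoder.getPixelType(), inputIStreamCoder.getWidth(),
            inputIStreamCoder.getHeight(), inputIStreamCoder.getPixelType());

    if (iVideoResampler == null) {
      throw new Exception("Unable to create video resampler for " + _inputURL);
    }

    return iVideoResampler;
  }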
Example #2
  protected void prepareVideo(
      IVideoResampler[] iVideoResamplers,
      IVideoPicture[] inputIVideoPictures,
      IVideoPicture[] outputIVideoPictures,
      IStreamCoder inputIStreamCoder,
      IStreamCoder[] outputIStreamCoders,
      IContainer outputIContainer,
      IStream[] outputIStreams,
      ICodec.Type inputICodecType,
      String outputURL,
      int index)
      throws Exception {

    ICodec iCodec = getVideoEncodingICodec(inputICodecType, outputURL);

    if (iCodec == null) {
      throw new RuntimeException(
          "Unable to determine " + inputICodecType + " encoder for " + outputURL);
    }

    IStream outputIStream = outputIContainer.addNewStream(iCodec);

    outputIStreams[index] = outputIStream;

    IStreamCoder outputIStreamCoder = outputIStream.getStreamCoder();

    outputIStreamCoders[index] = outputIStreamCoder;

    int bitRate = inputIStreamCoder.getBitRate();

    if (_log.isInfoEnabled()) {
      _log.info("Original video bitrate " + bitRate);
    }

    bitRate = getVideoBitRate(bitRate);

    if (_log.isInfoEnabled()) {
      _log.info("Modified video bitrate " + bitRate);
    }

    outputIStreamCoder.setBitRate(bitRate);

    IRational iRational = inputIStreamCoder.getFrameRate();

    if (_log.isInfoEnabled()) {
      _log.info(
          "Original frame rate " + iRational.getNumerator() + "/" + iRational.getDenominator());
    }

    iRational = getVideoFrameRate(iRational);

    if (_log.isInfoEnabled()) {
      _log.info(
          "Modified frame rate " + iRational.getNumerator() + "/" + iRational.getDenominator());
    }

    outputIStreamCoder.setFrameRate(iRational);

    if (inputIStreamCoder.getHeight() <= 0) {
      throw new RuntimeException("Unable to determine height for " + _inputURL);
    }

    if (_height == 0) {
      _height = inputIStreamCoder.getHeight();
    }

    outputIStreamCoder.setHeight(_height);

    outputIStreamCoder.setPixelType(Type.YUV420P);
    outputIStreamCoder.setTimeBase(
        IRational.make(iRational.getDenominator(), iRational.getNumerator()));

    if (inputIStreamCoder.getWidth() <= 0) {
      throw new RuntimeException("Unable to determine width for " + _inputURL);
    }

    if (_width == 0) {
      _width = inputIStreamCoder.getWidth();
    }

    outputIStreamCoder.setWidth(_width);

    iVideoResamplers[index] =
        createIVideoResampler(inputIStreamCoder, outputIStreamCoder, _height, _width);

    inputIVideoPictures[index] =
        IVideoPicture.make(
            inputIStreamCoder.getPixelType(),
            inputIStreamCoder.getWidth(),
            inputIStreamCoder.getHeight());
    outputIVideoPictures[index] =
        IVideoPicture.make(
            outputIStreamCoder.getPixelType(),
            outputIStreamCoder.getWidth(),
            outputIStreamCoder.getHeight());

    ICodec.ID iCodecID = iCodec.getID();

    if (iCodecID.equals(ICodec.ID.CODEC_ID_H264)) {
      Configuration.configure(_ffpresetProperties, outputIStreamCoder);
    }
  }
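
This refactored version moves the codec selection, bit rate, and frame rate logic into helpers that are not shown. Reconstructed from the inline logic of Example #1 above, they would look roughly like this (a sketch, not the exact original implementation):

  protected ICodec getVideoEncodingICodec(ICodec.Type inputICodecType, String outputURL) {

    // Mirrors Example #1: force H.264 for MP4 output, otherwise guess from the URL
    if (_outputVideoFormat.equals("mp4")) {
      return ICodec.findEncodingCodec(ICodec.ID.CODEC_ID_H264);
    }

    return ICodec.guessEncodingCodec(null, null, outputURL, null, inputICodecType);
  }

  protected int getVideoBitRate(int originalBitRate) {

    // Mirrors Example #1: fall back to the configured default when the input
    // reports no bit rate, and clamp anything above the maximum
    if (originalBitRate == 0) {
      return GetterUtil.getInteger(
          _videoBitRateMap.get(_outputVideoFormat), _VIDEO_BIT_RATE_DEFAULT);
    }
    else if (originalBitRate > _VIDEO_BIT_RATE_MAX) {
      return _VIDEO_BIT_RATE_MAX;
    }

    return originalBitRate;
  }

  protected IRational getVideoFrameRate(IRational originalFrameRate) {

    // Mirrors Example #1: prefer a per-format frame rate when one is configured
    if (_videoFrameRateMap.containsKey(_outputVideoFormat)) {
      return _videoFrameRateMap.get(_outputVideoFormat);
    }

    return originalFrameRate;
  }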
Example #3
  public void decode() {

    int success = videoStream.open();
    if (success < 0) {

      throw new RuntimeException(
          "XUGGLER DECODER: could not open video decoder for container: "
              + input.getLocation().getDecodedURL());
    }

    IAudioSamples decodeSamples = null;

    if (audioStream != null) {

      success = audioStream.open();
      if (success < 0) {

        throw new RuntimeException(
            "XUGGLER DECODER: could not open audio decoder for container: "
                + input.getLocation().getDecodedURL());
      }

      decodeSamples = IAudioSamples.make(1024, audioStream.getChannels());
    }

    IVideoPicture decodePicture =
        IVideoPicture.make(
            videoStream.getPixelType(), videoStream.getWidth(), videoStream.getHeight());

    while (container.readNextPacket(packet) >= 0 && decodeMode != DecodeMode.STOP) {

      /* Find out if this stream has a starting timestamp */
      IStream stream = container.getStream(packet.getStreamIndex());
      long tsOffset = 0;
      if (stream.getStartTime() != Global.NO_PTS
          && stream.getStartTime() > 0
          && stream.getTimeBase() != null) {
        IRational defTimeBase = IRational.make(1, (int) Global.DEFAULT_PTS_PER_SECOND);
        tsOffset = defTimeBase.rescale(stream.getStartTime(), stream.getTimeBase());
      }

      /*
       * Now we have a packet, let's see if it belongs to our video stream
       */
      if (packet.getStreamIndex() == videoStreamIndex) {

        int offset = 0;
        while (offset < packet.getSize()) {
          /*
           * Now, we decode the video, checking for any errors.
           *
           */
          int bytesDecoded = videoStream.decodeVideo(decodePicture, packet, offset);
          if (bytesDecoded < 0) {

            throw new RuntimeException(
                "XUGGLER: error decoding video in: " + input.getLocation().getDecodedURL());
          }

          if (decodePicture.getTimeStamp() != Global.NO_PTS) {

            decodePicture.setTimeStamp(decodePicture.getTimeStamp() - tsOffset);
          }

          offset += bytesDecoded;
          /*
           * Some decoders will consume data in a packet, but will not be able to construct
           * a full video picture yet.  Therefore you should always check if you
           * got a complete picture from the decoder
           */
          if (decodePicture.isComplete()) {

            decodedPicture(decodePicture);
          }
        }

      } else if (audioStream != null
          && packet.getStreamIndex() == audioStreamIndex
          && decodeMode != DecodeMode.IGNORE_AUDIO) {

        /*
         * A packet can actually contain multiple sets of samples (or frames of samples
         * in audio-decoding speak).  So, we may need to call decode audio multiple
         * times at different offsets in the packet's data.  We capture that here.
         */
        int offset = 0;

        /*
         * Keep going until we've processed all data
         */
        while (offset < packet.getSize()) {
          int bytesDecoded = audioStream.decodeAudio(decodeSamples, packet, offset);
          if (bytesDecoded < 0) {
            // Tolerate audio decode errors by skipping the rest of this packet
            // rather than aborting the whole decode:
            // throw new RuntimeException("XUGGLER: got error decoding audio in: " +
            // inputVideoFile);
            break;
          }

          if (decodeSamples.getTimeStamp() != Global.NO_PTS) {

            decodeSamples.setTimeStamp(decodeSamples.getTimeStamp() - tsOffset);
          }

          offset += bytesDecoded;
          /*
           * Some decoders will consume data in a packet, but will not be able to construct
           * a full set of samples yet.  Therefore you should always check if you
           * got a complete set of samples from the decoder
           */
          if (decodeSamples.isComplete()) {

            decodedAudioSamples(decodeSamples);
          }
        }

      } else {

        /*
         * This packet isn't part of our video stream, so we just
         * silently drop it.
         */
        continue;
      }
    }
  }
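
The decodedPicture and decodedAudioSamples callbacks are defined elsewhere in this decoder class. As a purely hypothetical illustration (the FrameListener type and the resampler and frameListener fields below are assumptions, not part of the original, and java.awt.image.BufferedImage is assumed imported), a picture callback that hands complete frames on as BufferedImages might look like:

  // Assumed callback target; not part of the original class
  interface FrameListener {
    void onFrame(BufferedImage image, long timeStampMicroseconds);
  }

  private FrameListener frameListener;
  private IVideoResampler resampler;

  protected void decodedPicture(IVideoPicture picture) {

    IVideoPicture bgrPicture = picture;

    // Utils.videoPictureToImage requires BGR24, so resample first if needed
    // ("resampler" is an assumed, pre-configured IVideoResampler)
    if (picture.getPixelType() != IPixelFormat.Type.BGR24) {
      bgrPicture =
          IVideoPicture.make(IPixelFormat.Type.BGR24, picture.getWidth(), picture.getHeight());

      if (resampler.resample(bgrPicture, picture) < 0) {
        throw new RuntimeException("XUGGLER DECODER: could not resample picture to BGR24");
      }
    }

    BufferedImage image = Utils.videoPictureToImage(bgrPicture);

    frameListener.onFrame(image, picture.getTimeStamp());
  }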
Example #4
  /**
   * Takes a media container (file) as the first argument, opens it, opens up a Swing window and
   * displays video frames with <i>roughly</i> the right timing.
   *
   * @param args Must contain one string which represents a filename
   */
  @SuppressWarnings("deprecation")
  public static void main(String[] args) {
    if (args.length <= 0)
      throw new IllegalArgumentException("must pass in a filename" + " as the first argument");

    String filename = args[0];

    // Let's make sure that we can actually convert video pixel formats.
    if (!IVideoResampler.isSupported(IVideoResampler.Feature.FEATURE_COLORSPACECONVERSION))
      throw new RuntimeException(
          "you must install the GPL version"
              + " of Xuggler (with IVideoResampler support) for "
              + "this demo to work");

    // Create a Xuggler container object
    IContainer container = IContainer.make();

    // Open up the container
    if (container.open(filename, IContainer.Type.READ, null) < 0)
      throw new IllegalArgumentException("could not open file: " + filename);

    // query how many streams the call to open found
    int numStreams = container.getNumStreams();

    // and iterate through the streams to find the first video stream
    int videoStreamId = -1;
    IStreamCoder videoCoder = null;
    for (int i = 0; i < numStreams; i++) {
      // Find the stream object
      IStream stream = container.getStream(i);
      // Get the pre-configured decoder that can decode this stream.
      IStreamCoder coder = stream.getStreamCoder();

      if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
        videoStreamId = i;
        videoCoder = coder;
        break;
      }
    }
    if (videoStreamId == -1)
      throw new RuntimeException("could not find video stream in container: " + filename);

    /*
     * Now we have found the video stream in this file.  Let's open up our decoder so it can
     * do work.
     */
    if (videoCoder.open() < 0)
      throw new RuntimeException("could not open video decoder for container: " + filename);

    IVideoResampler resampler = null;
    if (videoCoder.getPixelType() != IPixelFormat.Type.BGR24) {
      // if this stream is not in BGR24, we're going to need to
      // convert it.  The VideoResampler does that for us.
      resampler =
          IVideoResampler.make(
              videoCoder.getWidth(),
              videoCoder.getHeight(),
              IPixelFormat.Type.BGR24,
              videoCoder.getWidth(),
              videoCoder.getHeight(),
              videoCoder.getPixelType());
      if (resampler == null)
        throw new RuntimeException("could not create color space " + "resampler for: " + filename);
    }
    /*
     * And once we have that, we draw a window on screen
     */
    openJavaWindow();

    /*
     * Now, we start walking through the container looking at each packet.
     */
    IPacket packet = IPacket.make();
    long firstTimestampInStream = Global.NO_PTS;
    long systemClockStartTime = 0;
    while (container.readNextPacket(packet) >= 0) {
      /*
       * Now we have a packet, let's see if it belongs to our video stream
       */
      if (packet.getStreamIndex() == videoStreamId) {
        /*
         * We allocate a new picture to get the data out of Xuggler
         */
        IVideoPicture picture =
            IVideoPicture.make(
                videoCoder.getPixelType(), videoCoder.getWidth(), videoCoder.getHeight());

        int offset = 0;
        while (offset < packet.getSize()) {
          /*
           * Now, we decode the video, checking for any errors.
           *
           */
          int bytesDecoded = videoCoder.decodeVideo(picture, packet, offset);
          if (bytesDecoded < 0)
            throw new RuntimeException("got error decoding video in: " + filename);
          offset += bytesDecoded;

          /*
           * Some decoders will consume data in a packet, but will not be able to construct
           * a full video picture yet.  Therefore you should always check if you
           * got a complete picture from the decoder
           */
          if (picture.isComplete()) {
            IVideoPicture newPic = picture;
            /*
             * If the resampler is not null, that means we didn't get the
             * video in BGR24 format and
             * need to convert it into BGR24 format.
             */
            if (resampler != null) {
              // we must resample
              newPic =
                  IVideoPicture.make(
                      resampler.getOutputPixelFormat(), picture.getWidth(), picture.getHeight());
              if (resampler.resample(newPic, picture) < 0)
                throw new RuntimeException("could not resample video from: " + filename);
            }
            if (newPic.getPixelType() != IPixelFormat.Type.BGR24)
              throw new RuntimeException(
                  "could not decode video" + " as BGR 24 bit data in: " + filename);

            /*
             * We could just display the images as quickly as we decode them, but it turns out we
             * can decode a lot faster than you think.
             *
             * So instead, the following code does a poor-man's version of trying to match up the
             * frame-rate requested for each IVideoPicture with the system clock time on your
             * computer.
             *
             * Remember that all Xuggler IAudioSamples and IVideoPicture objects always give
             * timestamps in Microseconds, relative to the first decoded item. If instead you used
             * the packet timestamps, they could be in different units depending on your IContainer
             * and IStream, and things can get hairy quickly.
             */
            if (firstTimestampInStream == Global.NO_PTS) {
              // This is our first time through
              firstTimestampInStream = picture.getTimeStamp();
              // get the starting clock time so we can hold up frames
              // until the right time.
              systemClockStartTime = System.currentTimeMillis();
            } else {
              long systemClockCurrentTime = System.currentTimeMillis();
              long millisecondsClockTimeSinceStartofVideo =
                  systemClockCurrentTime - systemClockStartTime;
              // compute how long for this frame since the first frame in the
              // stream.
              // remember that IVideoPicture and IAudioSamples timestamps are
              // always in MICROSECONDS,
              // so we divide by 1000 to get milliseconds.
              long millisecondsStreamTimeSinceStartOfVideo =
                  (picture.getTimeStamp() - firstTimestampInStream) / 1000;
              final long millisecondsTolerance = 50; // and we give ourselves 50 ms of tolerance
              final long millisecondsToSleep =
                  (millisecondsStreamTimeSinceStartOfVideo
                      - (millisecondsClockTimeSinceStartofVideo + millisecondsTolerance));
              if (millisecondsToSleep > 0) {
                try {
                  Thread.sleep(millisecondsToSleep);
                } catch (InterruptedException e) {
                  // we might get this when the user closes the dialog box, so
                  // just return from the method.
                  return;
                }
              }
            }

            // And finally, convert the BGR24 to an Java buffered image
            BufferedImage javaImage = Utils.videoPictureToImage(newPic);

            // and display it on the Java Swing window
            updateJavaWindow(javaImage);
          }
        }
      } else {
        /*
         * This packet isn't part of our video stream, so we just
         * silently drop it.
         */
      }
    }
    /*
     * Technically since we're exiting anyway, these will be cleaned up by
     * the garbage collector... but because we're nice people and want
     * to be invited places for Christmas, we're going to show how to clean up.
     */
    if (videoCoder != null) {
      videoCoder.close();
      videoCoder = null;
    }
    if (container != null) {
      container.close();
      container = null;
    }
    closeJavaWindow();
  }
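
The openJavaWindow, updateJavaWindow, and closeJavaWindow helpers are not shown above; the original demo ships its own video component. A minimal stand-in using plain Swing (assuming javax.swing.JFrame, JLabel, and ImageIcon imports) might be:

  private static JFrame frame;
  private static JLabel label;

  private static void openJavaWindow() {
    frame = new JFrame("Xuggler video player");
    label = new JLabel();
    frame.getContentPane().add(label);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.pack();
    frame.setVisible(true);
  }

  private static void updateJavaWindow(BufferedImage javaImage) {
    // Production code should push this onto the Swing event thread; this sketch,
    // like the original demo, calls it straight from the decode loop
    boolean firstFrame = (label.getIcon() == null);

    label.setIcon(new ImageIcon(javaImage));

    if (firstFrame) {
      frame.pack(); // size the window to the first decoded frame
    }
  }

  private static void closeJavaWindow() {
    frame.dispose();
  }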
Example #5
        /**
         * Takes the input picture and, when it is complete and in YUV420P pixel format, replaces
         * its contents from the game screen. A half-grayscale effect on the raw YUV planes is
         * left commented out below.
         */
        public IVideoPicture preEncode(IVideoPicture picture) {

          /*if (picture.isComplete()) {
          	System.out.println("Transcoding");
          	BufferedImage img = convertToType(game.getScreenContents(),
          			BufferedImage.TYPE_3BYTE_BGR);
          	IConverter converter = ConverterFactory.createConverter(img,
          			picture.getPixelType());
          	picture = converter.toPicture(img, timeStamp);
          	//timeStamp += 33000;
          }
          return picture;*/

          final boolean doGrayscale = true;
          if (doGrayscale
              && picture.isComplete()
              && picture.getPixelType() == IPixelFormat.Type.YUV420P) {
            BufferedImage img =
                convertToType(game.getScreenContents(), BufferedImage.TYPE_3BYTE_BGR);
            IConverter converter = ConverterFactory.createConverter(img, picture.getPixelType());

            picture = converter.toPicture(img, picture.getTimeStamp());
            /*
            //System.out.println("grayscaling half of a video picture for fun and profit");
            int width = picture.getWidth();
            int height = picture.getHeight();

            IBuffer buffer = picture.getData();
            java.nio.ByteBuffer bytes = buffer.getByteBuffer(0, buffer.getBufferSize());

            // First we mirror just the Y pixels from the right half to the left half.
            int pixelWidthToMirror = width /2; // round down

            // Iterate line by line through the Y plane
            for(int line=0; line < height; line++)
            {
              int lineStart = line*width; // find the offset in the buffer where the next Y line starts
              for(int i = 0; i < pixelWidthToMirror; i++)
              {
                // this line reverses the bits in each Y line
                bytes.put(lineStart+i, bytes.get(lineStart+width-1-i));
              }
            }

            // YUV420 P contains all the Y pixels, then all the U values and then all the V values in a picture.
            // To make gray scale, simply leave all Y pixels and 0 out all the U and V values in the line
            // that are beyond half the picture.
            int uwidth = -((-width)>>1); // round-up
            int uheight = -((-height)>>1); // round-up

            int pixelWidthToGray = Math.min(uwidth/2, mGrayScaleMask.length); // round down
            if (pixelWidthToGray > 0)
            {
              int startingUOffset = width*height; // skip all Y values
              int startingVOffset = width*height + uwidth*uheight; // skip all Y and all U values
              for(int line = 0; line < uheight; line++)
              {
            // Find the U values for the line we're processing
                bytes.position(startingUOffset+line*uwidth);
                // And gray out the first half of the line
                bytes.put(mGrayScaleMask, 0, pixelWidthToGray);
                // Find the V values for the line we're processing
                bytes.position(startingVOffset+line*uwidth);
                // And gray out the first half of the line
                bytes.put(mGrayScaleMask, 0, pixelWidthToGray);
              }
            }
            bytes = null; // tell the JVM they can collect this when they want.
            // and release the IBuffer
            buffer = null;*/
          }

          return picture;
        }
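
The convertToType helper used above is not shown. A common implementation simply redraws the source image into a new BufferedImage of the requested type; a sketch:

        // Assumed helper: returns the image unchanged if it is already the
        // requested type, otherwise redraws it into a new BufferedImage.
        public static BufferedImage convertToType(BufferedImage sourceImage, int targetType) {

          if (sourceImage.getType() == targetType) {
            return sourceImage;
          }

          BufferedImage image =
              new BufferedImage(sourceImage.getWidth(), sourceImage.getHeight(), targetType);

          image.getGraphics().drawImage(sourceImage, 0, 0, null);

          return image;
        }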