Example #1
  public void open(VirtualInputFile input) {

    close();

    this.input = input;

    container = IContainer.make();
    format = IContainerFormat.make();
    packet = IPacket.make();

    // open the input video file
    int success = container.open(input, format, true, true);

    if (success < 0) {

      throw new RuntimeException(
          "XUGGLER DECODER: could not open input: " + input.getLocation().getDecodedURL());
    }

    for (int streamIndex = 0; streamIndex < container.getNumStreams(); streamIndex++) {

      IStreamCoder coder = container.getStream(streamIndex).getStreamCoder();

      switch (coder.getCodecType()) {
        case CODEC_TYPE_VIDEO:
          {
            videoStream = coder;
            videoStreamIndex = streamIndex;
            break;
          }
        case CODEC_TYPE_AUDIO:
          {
            audioStream = coder;
            audioStreamIndex = streamIndex;
            break;
          }
        default:
          {
            break;
          }
      }
    }

    setDecodeMode(DecodeMode.NORMAL);
  }
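This open() method and the decode() method in Example #4 appear to belong to the same decoder class, and they rely on fields and a DecodeMode enum that the excerpt does not show. A minimal sketch of what those declarations might look like, inferred purely from how they are used here and in Example #4 (the initial values and exact shape are assumptions):

  // Sketch of the decoder's assumed state; inferred from usage, not the original declarations.
  public enum DecodeMode { NORMAL, IGNORE_AUDIO, STOP }

  private VirtualInputFile input;
  private IContainer container;
  private IContainerFormat format;
  private IPacket packet;
  private IStreamCoder videoStream;
  private IStreamCoder audioStream;
  private int videoStreamIndex = -1;
  private int audioStreamIndex = -1;
  private DecodeMode decodeMode = DecodeMode.NORMAL;

  public void setDecodeMode(DecodeMode mode) {
    this.decodeMode = mode;
  }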
  protected void doConvert() throws Exception {
    _inputIContainer = IContainer.make();
    _outputIContainer = IContainer.make();

    openContainer(_inputIContainer, _inputURL, false);
    openContainer(_outputIContainer, _outputURL, true);

    int inputStreamsCount = _inputIContainer.getNumStreams();

    if (inputStreamsCount <= 0) {
      throw new RuntimeException("Input URL does not have any streams");
    }

    IContainerFormat iContainerFormat = _outputIContainer.getContainerFormat();

    _outputVideoFormat = iContainerFormat.getOutputFormatShortName();

    IAudioResampler[] iAudioResamplers = new IAudioResampler[inputStreamsCount];
    IVideoResampler[] iVideoResamplers = new IVideoResampler[inputStreamsCount];

    IAudioSamples[] inputIAudioSamples = new IAudioSamples[inputStreamsCount];
    IAudioSamples[] outputIAudioSamples = new IAudioSamples[inputStreamsCount];

    IVideoPicture[] inputIVideoPictures = new IVideoPicture[inputStreamsCount];
    IVideoPicture[] outputIVideoPictures = new IVideoPicture[inputStreamsCount];

    IStream[] outputIStreams = new IStream[inputStreamsCount];

    IStreamCoder[] inputIStreamCoders = new IStreamCoder[inputStreamsCount];
    IStreamCoder[] outputIStreamCoders = new IStreamCoder[inputStreamsCount];

    for (int i = 0; i < inputStreamsCount; i++) {
      IStream inputIStream = _inputIContainer.getStream(i);

      IStreamCoder inputIStreamCoder = inputIStream.getStreamCoder();

      inputIStreamCoders[i] = inputIStreamCoder;

      ICodec.Type inputICodecType = inputIStreamCoder.getCodecType();

      if (inputICodecType == ICodec.Type.CODEC_TYPE_AUDIO) {
        prepareAudio(
            iAudioResamplers,
            inputIAudioSamples,
            outputIAudioSamples,
            inputIStreamCoder,
            outputIStreamCoders,
            _outputIContainer,
            outputIStreams,
            inputICodecType,
            _outputURL,
            i);
      } else if (inputICodecType == ICodec.Type.CODEC_TYPE_VIDEO) {
        prepareVideo(
            iVideoResamplers,
            inputIVideoPictures,
            outputIVideoPictures,
            inputIStreamCoder,
            outputIStreamCoders,
            _outputIContainer,
            outputIStreams,
            inputICodecType,
            _outputURL,
            i);
      }

      openStreamCoder(inputIStreamCoders[i]);
      openStreamCoder(outputIStreamCoders[i]);
    }

    if (_outputIContainer.writeHeader() < 0) {
      throw new RuntimeException("Unable to write container header");
    }

    boolean keyPacketFound = false;
    int nonKeyAfterKeyCount = 0;
    boolean onlyDecodeKeyPackets = false;
    int previousPacketSize = -1;

    IPacket inputIPacket = IPacket.make();
    IPacket outputIPacket = IPacket.make();

    while (_inputIContainer.readNextPacket(inputIPacket) == 0) {
      if (_log.isDebugEnabled()) {
        _log.debug("Current packet size " + inputIPacket.getSize());
      }

      int streamIndex = inputIPacket.getStreamIndex();

      IStreamCoder inputIStreamCoder = inputIStreamCoders[streamIndex];
      IStreamCoder outputIStreamCoder = outputIStreamCoders[streamIndex];

      if (outputIStreamCoder == null) {
        continue;
      }

      IStream iStream = _inputIContainer.getStream(streamIndex);

      long timeStampOffset = getStreamTimeStampOffset(iStream);

      if (inputIStreamCoder.getCodecType() == ICodec.Type.CODEC_TYPE_AUDIO) {

        decodeAudio(
            iAudioResamplers[streamIndex],
            inputIAudioSamples[streamIndex],
            outputIAudioSamples[streamIndex],
            inputIPacket,
            outputIPacket,
            inputIStreamCoder,
            outputIStreamCoder,
            _outputIContainer,
            inputIPacket.getSize(),
            previousPacketSize,
            streamIndex,
            timeStampOffset);
      } else if (inputIStreamCoder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {

        keyPacketFound = isKeyPacketFound(inputIPacket, keyPacketFound);

        nonKeyAfterKeyCount =
            countNonKeyAfterKey(inputIPacket, keyPacketFound, nonKeyAfterKeyCount);

        if (isStartDecoding(
            inputIPacket,
            inputIStreamCoder,
            keyPacketFound,
            nonKeyAfterKeyCount,
            onlyDecodeKeyPackets)) {

          int value =
              decodeVideo(
                  iVideoResamplers[streamIndex],
                  inputIVideoPictures[streamIndex],
                  outputIVideoPictures[streamIndex],
                  inputIPacket,
                  outputIPacket,
                  inputIStreamCoder,
                  outputIStreamCoder,
                  _outputIContainer,
                  null,
                  null,
                  0,
                  0,
                  timeStampOffset);

          if (value <= 0) {
            if (inputIPacket.isKey()) {
              throw new RuntimeException("Unable to decode video stream " + streamIndex);
            }

            onlyDecodeKeyPackets = true;

            continue;
          }
        } else {
          if (_log.isDebugEnabled()) {
            _log.debug("Do not decode video stream " + streamIndex);
          }
        }
      }

      previousPacketSize = inputIPacket.getSize();
    }

    flush(outputIStreamCoders, _outputIContainer);

    if (_outputIContainer.writeTrailer() < 0) {
      throw new RuntimeException("Unable to write trailer to output file");
    }

    cleanUp(iAudioResamplers, iVideoResamplers);
    cleanUp(inputIAudioSamples, outputIAudioSamples);
    cleanUp(inputIVideoPictures, outputIVideoPictures);
    cleanUp(inputIStreamCoders, outputIStreamCoders);
    cleanUp(inputIPacket, outputIPacket);
  }
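doConvert() delegates to helpers such as openContainer, prepareAudio, prepareVideo, decodeAudio, decodeVideo, and getStreamTimeStampOffset that are not part of this excerpt. As one illustration, a plausible sketch of getStreamTimeStampOffset, modeled on the inline start-time handling in Example #4 (the original helper may differ):

  protected long getStreamTimeStampOffset(IStream stream) {
    // Rescale the stream's start time into Xuggler's default time base (microseconds)
    // so it can later be subtracted from decoded timestamps.
    long offset = 0;
    if (stream.getStartTime() != Global.NO_PTS
        && stream.getStartTime() > 0
        && stream.getTimeBase() != null) {
      IRational defaultTimeBase = IRational.make(1, (int) Global.DEFAULT_PTS_PER_SECOND);
      offset = defaultTimeBase.rescale(stream.getStartTime(), stream.getTimeBase());
    }
    return offset;
  }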
  /**
   * Takes a media container (file) as the first argument, opens it, opens up a Swing window and
   * displays video frames with <i>roughly</i> the right timing.
   *
   * @param args Must contain one string which represents a filename
   */
  @SuppressWarnings("deprecation")
  public static void main(String[] args) {
    if (args.length <= 0)
      throw new IllegalArgumentException("must pass in a filename" + " as the first argument");

    String filename = args[0];

    // Let's make sure that we can actually convert video pixel formats.
    if (!IVideoResampler.isSupported(IVideoResampler.Feature.FEATURE_COLORSPACECONVERSION))
      throw new RuntimeException(
          "you must install the GPL version"
              + " of Xuggler (with IVideoResampler support) for "
              + "this demo to work");

    // Create a Xuggler container object
    IContainer container = IContainer.make();

    // Open up the container
    if (container.open(filename, IContainer.Type.READ, null) < 0)
      throw new IllegalArgumentException("could not open file: " + filename);

    // query how many streams the call to open found
    int numStreams = container.getNumStreams();

    // and iterate through the streams to find the first video stream
    int videoStreamId = -1;
    IStreamCoder videoCoder = null;
    for (int i = 0; i < numStreams; i++) {
      // Find the stream object
      IStream stream = container.getStream(i);
      // Get the pre-configured decoder that can decode this stream.
      IStreamCoder coder = stream.getStreamCoder();

      if (coder.getCodecType() == ICodec.Type.CODEC_TYPE_VIDEO) {
        videoStreamId = i;
        videoCoder = coder;
        break;
      }
    }
    if (videoStreamId == -1)
      throw new RuntimeException("could not find video stream in container: " + filename);

    /*
     * Now we have found the video stream in this file.  Let's open up our decoder so it can
     * do work.
     */
    if (videoCoder.open() < 0)
      throw new RuntimeException("could not open video decoder for container: " + filename);

    IVideoResampler resampler = null;
    if (videoCoder.getPixelType() != IPixelFormat.Type.BGR24) {
      // if this stream is not in BGR24, we're going to need to
      // convert it.  The VideoResampler does that for us.
      resampler =
          IVideoResampler.make(
              videoCoder.getWidth(),
              videoCoder.getHeight(),
              IPixelFormat.Type.BGR24,
              videoCoder.getWidth(),
              videoCoder.getHeight(),
              videoCoder.getPixelType());
      if (resampler == null)
        throw new RuntimeException("could not create color space " + "resampler for: " + filename);
    }
    /*
     * And once we have that, we draw a window on screen
     */
    openJavaWindow();

    /*
     * Now, we start walking through the container looking at each packet.
     */
    IPacket packet = IPacket.make();
    long firstTimestampInStream = Global.NO_PTS;
    long systemClockStartTime = 0;
    while (container.readNextPacket(packet) >= 0) {
      /*
       * Now we have a packet, let's see if it belongs to our video stream
       */
      if (packet.getStreamIndex() == videoStreamId) {
        /*
         * We allocate a new picture to get the data out of Xuggler
         */
        IVideoPicture picture =
            IVideoPicture.make(
                videoCoder.getPixelType(), videoCoder.getWidth(), videoCoder.getHeight());

        int offset = 0;
        while (offset < packet.getSize()) {
          /*
           * Now, we decode the video, checking for any errors.
           *
           */
          int bytesDecoded = videoCoder.decodeVideo(picture, packet, offset);
          if (bytesDecoded < 0)
            throw new RuntimeException("got error decoding video in: " + filename);
          offset += bytesDecoded;

          /*
           * Some decoders will consume data in a packet, but will not be able to construct
           * a full video picture yet.  Therefore you should always check if you
           * got a complete picture from the decoder
           */
          if (picture.isComplete()) {
            IVideoPicture newPic = picture;
            /*
             * If the resampler is not null, that means we didn't get the
             * video in BGR24 format and
             * need to convert it into BGR24 format.
             */
            if (resampler != null) {
              // we must resample
              newPic =
                  IVideoPicture.make(
                      resampler.getOutputPixelFormat(), picture.getWidth(), picture.getHeight());
              if (resampler.resample(newPic, picture) < 0)
                throw new RuntimeException("could not resample video from: " + filename);
            }
            if (newPic.getPixelType() != IPixelFormat.Type.BGR24)
              throw new RuntimeException(
                  "could not decode video" + " as BGR 24 bit data in: " + filename);

            /**
             * We could just display the images as quickly as we decode them, but it turns out we
             * can decode a lot faster than real time.
             *
             * <p>So instead, the following code does a poor-man's version of trying to match up the
             * frame-rate requested for each IVideoPicture with the system clock time on your
             * computer.
             *
             * <p>Remember that all Xuggler IAudioSamples and IVideoPicture objects always give
             * timestamps in Microseconds, relative to the first decoded item. If instead you used
             * the packet timestamps, they can be in different units depending on your IContainer
             * and IStream, and things can get hairy quickly.
             */
            if (firstTimestampInStream == Global.NO_PTS) {
              // This is our first time through
              firstTimestampInStream = picture.getTimeStamp();
              // get the starting clock time so we can hold up frames
              // until the right time.
              systemClockStartTime = System.currentTimeMillis();
            } else {
              long systemClockCurrentTime = System.currentTimeMillis();
              long millisecondsClockTimeSinceStartofVideo =
                  systemClockCurrentTime - systemClockStartTime;
              // compute how long for this frame since the first frame in the
              // stream.
              // remember that IVideoPicture and IAudioSamples timestamps are
              // always in MICROSECONDS,
              // so we divide by 1000 to get milliseconds.
              long millisecondsStreamTimeSinceStartOfVideo =
                  (picture.getTimeStamp() - firstTimestampInStream) / 1000;
              final long millisecondsTolerance = 50; // and we give ourselves 50 ms of tolerance
              final long millisecondsToSleep =
                  (millisecondsStreamTimeSinceStartOfVideo
                      - (millisecondsClockTimeSinceStartofVideo + millisecondsTolerance));
              if (millisecondsToSleep > 0) {
                try {
                  Thread.sleep(millisecondsToSleep);
                } catch (InterruptedException e) {
                  // we might get this when the user closes the dialog box, so
                  // just return from the method.
                  return;
                }
              }
            }

            // And finally, convert the BGR24 image to a Java BufferedImage
            BufferedImage javaImage = Utils.videoPictureToImage(newPic);

            // and display it on the Java Swing window
            updateJavaWindow(javaImage);
          }
        }
      } else {
        /*
         * This packet isn't part of our video stream, so we just
         * silently drop it.
         */
      }
    }
    /*
     * Technically since we're exiting anyway, these will be cleaned up by
     * the garbage collector... but because we're nice people and want
     * to be invited places for Christmas, we're going to show how to clean up.
     */
    if (videoCoder != null) {
      videoCoder.close();
      videoCoder = null;
    }
    if (container != null) {
      container.close();
      container = null;
    }
    closeJavaWindow();
  }
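The demo above calls openJavaWindow, updateJavaWindow, and closeJavaWindow, which are defined elsewhere. A rough, hypothetical sketch using a plain Swing JFrame and JLabel (the original demo may use a dedicated video window class):

  // Hypothetical Swing helpers; assumes imports of javax.swing.JFrame, javax.swing.JLabel,
  // javax.swing.ImageIcon, and java.awt.image.BufferedImage.
  private static JFrame frame;
  private static JLabel label;

  private static void openJavaWindow() {
    frame = new JFrame("Video");
    label = new JLabel();
    frame.getContentPane().add(label);
    frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
    frame.setSize(640, 480);
    frame.setVisible(true);
  }

  private static void updateJavaWindow(BufferedImage javaImage) {
    // Swap in the latest decoded frame; the JLabel repaints itself when its icon changes.
    label.setIcon(new ImageIcon(javaImage));
  }

  private static void closeJavaWindow() {
    frame.dispose();
  }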
Example #4
  public void decode() {

    int success = videoStream.open();
    if (success < 0) {

      throw new RuntimeException(
          "XUGGLER DECODER: could not open video decoder for container: "
              + input.getLocation().getDecodedURL());
    }

    IAudioSamples decodeSamples = null;

    if (audioStream != null) {

      success = audioStream.open();
      if (success < 0) {

        throw new RuntimeException(
            "XUGGLER DECODER: could not open audio decoder for container: "
                + input.getLocation().getDecodedURL());
      }

      decodeSamples = IAudioSamples.make(1024, audioStream.getChannels());
    }

    IVideoPicture decodePicture =
        IVideoPicture.make(
            videoStream.getPixelType(), videoStream.getWidth(), videoStream.getHeight());

    while (container.readNextPacket(packet) >= 0 && decodeMode != DecodeMode.STOP) {

      /** Find out if this stream has a starting timestamp */
      IStream stream = container.getStream(packet.getStreamIndex());
      long tsOffset = 0;
      if (stream.getStartTime() != Global.NO_PTS
          && stream.getStartTime() > 0
          && stream.getTimeBase() != null) {
        IRational defTimeBase = IRational.make(1, (int) Global.DEFAULT_PTS_PER_SECOND);
        tsOffset = defTimeBase.rescale(stream.getStartTime(), stream.getTimeBase());
      }

      /*
       * Now we have a packet, let's see if it belongs to our video stream
       */
      if (packet.getStreamIndex() == videoStreamIndex) {

        int offset = 0;
        while (offset < packet.getSize()) {
          /*
           * Now, we decode the video, checking for any errors.
           *
           */
          int bytesDecoded = videoStream.decodeVideo(decodePicture, packet, offset);
          if (bytesDecoded < 0) {

            throw new RuntimeException(
                "XUGGLER: error decoding video in: " + input.getLocation().getDecodedURL());
          }

          if (decodePicture.getTimeStamp() != Global.NO_PTS) {

            decodePicture.setTimeStamp(decodePicture.getTimeStamp() - tsOffset);
          }

          offset += bytesDecoded;
          /*
           * Some decoders will consume data in a packet, but will not be able to construct
           * a full video picture yet.  Therefore you should always check if you
           * got a complete picture from the decoder
           */
          if (decodePicture.isComplete()) {

            decodedPicture(decodePicture);
          }
        }

      } else if (audioStream != null
          && packet.getStreamIndex() == audioStreamIndex
          && decodeMode != DecodeMode.IGNORE_AUDIO) {

        /*
         * A packet can actually contain multiple sets of samples (or frames of samples
         * in audio-decoding speak).  So, we may need to call decode audio multiple
         * times at different offsets in the packet's data.  We capture that here.
         */
        int offset = 0;

        /*
         * Keep going until we've processed all data
         */
        while (offset < packet.getSize()) {
          int bytesDecoded = audioStream.decodeAudio(decodeSamples, packet, offset);
          if (bytesDecoded < 0) {
            // Decoding error on an audio packet; skip the rest of this packet rather than abort.
            break;
          }

          if (decodeSamples.getTimeStamp() != Global.NO_PTS) {

            decodeSamples.setTimeStamp(decodeSamples.getTimeStamp() - tsOffset);
          }

          offset += bytesDecoded;
          /*
           * Some decoders will consume data in a packet, but will not be able to construct
           * a full set of samples yet.  Therefore you should always check if you
           * got a complete set of samples from the decoder
           */
          if (decodeSamples.isComplete()) {

            decodedAudioSamples(decodeSamples);
          }
        }

      } else {

        /*
         * This packet isn't part of our video stream, so we just
         * silently drop it.
         */
        continue;
      }
    }
  }
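decode() hands completed frames to decodedPicture(...) and completed samples to decodedAudioSamples(...); neither hook appears in this excerpt. One possible shape for them, assuming the frame should be passed on as a BufferedImage (Utils.videoPictureToImage is the same conversion used in the Swing demo above and expects BGR24 input):

  // Hypothetical consumer hooks; the original implementations are not shown.
  protected void decodedPicture(IVideoPicture picture) {
    // Utils.videoPictureToImage expects BGR24 data, so a resampling step like the one in the
    // Swing demo above would be needed if the stream uses a different pixel format.
    BufferedImage image = Utils.videoPictureToImage(picture);
    // Hand the image to whatever consumes decoded frames (renderer, writer, listener, ...).
  }

  protected void decodedAudioSamples(IAudioSamples samples) {
    // Hand the raw samples to an audio sink, for example a javax.sound.sampled SourceDataLine
    // like the one sketched after playSong() below.
  }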
  /**
   * The playSong method is responsible for opening a Xuggler container to play the song at the
   * provided location.
   *
   * @param songURL The location of the song to play (local file path or url)
   */
  public void playSong(String songURL) {

    IContainer container = IContainer.make();

    IContainerFormat format = IContainerFormat.make();

    // Stream format must currently be mp3
    format.setInputFormat("mp3");

    // The probe size is measured in bytes and must be at least 50; the native libraries throw an
    // exception for smaller values.
    if (container.setProperty("probesize", 50) < 0) {
      logger.warn("Probe size not set for input container.");
    }

    if (container.setProperty("analyzeduration", 1) < 0) {
      logger.warn("Analyze duration not changed for input container.");
    }

    container.setFlag(IContainer.Flags.FLAG_NONBLOCK, true);

    if (container.open(songURL, Type.READ, format, true, false) < 0) {
      throw new IllegalArgumentException("stream not found");
    }

    int numStreams = container.getNumStreams();

    logger.info("Number of Audio streams detected {}", numStreams);

    IPacket packet = IPacket.make();
    IStream stream = null;
    IStreamCoder audioCoder = null;

    Map<Integer, IStreamCoder> knownStreams = new HashMap<Integer, IStreamCoder>();

    long previousValue = 0;

    while (container.readNextPacket(packet) >= 0 && alive) {

      if (packet.isComplete()) {

        if (knownStreams.get(packet.getStreamIndex()) == null) {
          container.queryStreamMetaData(); // This method tends to take a while when reading a stream
          stream = container.getStream(packet.getStreamIndex());
          knownStreams.put(packet.getStreamIndex(), stream.getStreamCoder());

          audioCoder = knownStreams.get(packet.getStreamIndex());

          audioCoder.setTimeBase(stream.getTimeBase());
        }

        if (!audioCoder.isOpen()) {
          if (audioCoder.open(null, null) < 0) {
            throw new RuntimeException("could not open audio decoder for container");
          }

          openSound(audioCoder);

        }

        int offset = 0;

        IAudioSamples samples = IAudioSamples.make(1024, audioCoder.getChannels());

        while (offset < packet.getSize() && alive) {

          // Wait until the state is playing
          while (state != PlayBack_State.PLAYING) {

            if (state == PlayBack_State.TEARDOWN) {
              break;
            } else {
              try {
                synchronized (LOCK_OBJECT) {
                  // mLine.drain();
                  mLine.flush();
                  mLine.stop();

                  LOCK_OBJECT.wait();

                  mLine.start();
                }
              } catch (InterruptedException e) {
                logger.error("", e);
              }
            }
          }

          int bytesDecoded = audioCoder.decodeAudio(samples, packet, offset);

          if (bytesDecoded < 0) {
            logger.warn("Error occurred decoding audio");
            break;
            // throw new RuntimeException("got error decoding audio");
          }

          offset += bytesDecoded;

          if (samples.isComplete() && alive) {
            playJavaSound(samples);
          }

          // Send the time stamp to the GUI for updating the progress bar
          long newValue = (long) (packet.getTimeStamp() * packet.getTimeBase().getValue());

          // Update GUI every second that the stream is playing
          if (newValue > previousValue) {
            callback.notifyGUISongProgress(newValue);
            callback.isStreaming(true);
            previousValue = newValue;

            if (newValue == streamInfo.getSongDuration()) {
              alive = false;
            }
          }
        }
      }
    }

    closeJavaSound();

    if (audioCoder != null) {
      audioCoder.close();
      audioCoder = null;
    }
    if (container != null) {
      container.close();
      container = null;
    }
  }
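playSong() depends on openSound, playJavaSound, and closeJavaSound, plus an mLine field, none of which appear in this excerpt. A rough sketch using javax.sound.sampled, assuming mLine is the SourceDataLine that the pause loop flushes, stops, and restarts (the original helpers may differ):

  // Hypothetical audio helpers; assumes imports of javax.sound.sampled.AudioFormat, AudioSystem,
  // DataLine, SourceDataLine, and LineUnavailableException.
  private SourceDataLine mLine;

  private void openSound(IStreamCoder audioCoder) {
    AudioFormat audioFormat =
        new AudioFormat(
            audioCoder.getSampleRate(),
            (int) IAudioSamples.findSampleBitDepth(audioCoder.getSampleFormat()),
            audioCoder.getChannels(),
            true /* signed */,
            false /* little-endian */);
    try {
      DataLine.Info info = new DataLine.Info(SourceDataLine.class, audioFormat);
      mLine = (SourceDataLine) AudioSystem.getLine(info);
      mLine.open(audioFormat);
      mLine.start();
    } catch (LineUnavailableException e) {
      throw new RuntimeException("could not open audio line", e);
    }
  }

  private void playJavaSound(IAudioSamples samples) {
    // Copy the decoded bytes out of the Xuggler buffer and write them to the line.
    byte[] rawBytes = samples.getData().getByteArray(0, samples.getSize());
    mLine.write(rawBytes, 0, rawBytes.length);
  }

  private void closeJavaSound() {
    if (mLine != null) {
      mLine.drain();
      mLine.close();
      mLine = null;
    }
  }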