/** Pushes bytes back so they will be read again by subsequent calls to {@link #read}. */
 private void pushback(byte[] bytes, int len) {
   if (pushbackBufferLen == 0) {
     pushbackBuffer = bytes; // TODO: copy? (keeps a reference to the caller's array)
     pushbackBufferLen = len;
     pushbackBufferOffset = 0;
   } else {
     // Append the new bytes after the not-yet-consumed part of the existing pushback buffer.
     final byte[] newPushbackBuffer = new byte[pushbackBufferLen + len];
     System.arraycopy(pushbackBuffer, pushbackBufferOffset, newPushbackBuffer, 0, pushbackBufferLen);
     System.arraycopy(bytes, 0, newPushbackBuffer, pushbackBufferLen, len);
     pushbackBuffer = newPushbackBuffer;
     pushbackBufferLen = pushbackBufferLen + len;
     pushbackBufferOffset = 0;
   }
 }
  public static void main(String[] args) {
     // Hard-coded arguments for testing; remove these two lines to use the real command line.
     String arg[] = {"-o", "file:/c:/foo.wav", "file:/c:/1.mp3", "file:/c:/1.wav"};
     args = arg;
    Vector<String> inputURL = new Vector<String>();
    String outputURL = null;

    if (args.length == 0) {
      prUsage();
    }

    // Parse the arguments.
    int i = 0;
    while (i < args.length) {

      if (args[i].equals("-o")) {
        i++;
        if (i >= args.length) prUsage();
        outputURL = args[i];
      } else {
        inputURL.addElement(args[i]);
      }
      i++;
    }

    if (inputURL.size() == 0) {
      System.err.println("No input url is specified");
      prUsage();
    }

    if (outputURL == null) {
      System.err.println("No output url is specified");
      prUsage();
    }

    // Generate the input and output media locators.
    MediaLocator iml[] = new MediaLocator[inputURL.size()];
    MediaLocator oml;

    for (i = 0; i < inputURL.size(); i++) {
      if ((iml[i] = createMediaLocator(inputURL.elementAt(i))) == null) {
        System.err.println("Cannot build media locator from: " + inputURL);
        System.exit(0);
      }
    }

    if ((oml = createMediaLocator(outputURL)) == null) {
      System.err.println("Cannot build media locator from: " + outputURL);
      System.exit(0);
    }

    FilesConcator concat = new FilesConcator();

    if (!concat.doIt(iml, oml)) {
      System.err.println("Failed to concatenate the inputs");
    }

    System.exit(0);
  }
Example #3
  /**
   * Blocks and reads into a <tt>Buffer</tt> from this <tt>PullBufferStream</tt>.
   *
   * @param buffer the <tt>Buffer</tt> this <tt>PullBufferStream</tt> is to read into
   * @throws IOException if an I/O error occurs while this <tt>PullBufferStream</tt> reads into the
   *     specified <tt>Buffer</tt>
   * @see AbstractVideoPullBufferStream#doRead(Buffer)
   */
  @Override
  protected void doRead(Buffer buffer) throws IOException {
    /*
     * Determine the Format in which we're expected to output. We cannot
     * rely on the Format always being specified in the Buffer because that
     * is not its responsibility; the DataSource of this ImageStream knows
     * the output Format.
     */
    Format format = buffer.getFormat();

    if (format == null) {
      format = getFormat();
      if (format != null) buffer.setFormat(format);
    }

    if (format instanceof AVFrameFormat) {
      Object o = buffer.getData();
      AVFrame frame;

      if (o instanceof AVFrame) frame = (AVFrame) o;
      else {
        frame = new AVFrame();
        buffer.setData(frame);
      }

      AVFrameFormat avFrameFormat = (AVFrameFormat) format;
      Dimension size = avFrameFormat.getSize();
      ByteBuffer data = readScreenNative(size);

      if (data != null) {
        if (frame.avpicture_fill(data, avFrameFormat) < 0) {
          data.free();
          throw new IOException("avpicture_fill");
        }
      } else {
        /*
         * This can happen when a monitor is disconnected from the computer
         * before or during grabbing.
         */
        throw new IOException("Failed to grab screen.");
      }
    } else {
      byte[] bytes = (byte[]) buffer.getData();
      Dimension size = ((VideoFormat) format).getSize();

      bytes = readScreen(bytes, size);

      buffer.setData(bytes);
      buffer.setOffset(0);
      buffer.setLength(bytes.length);
    }

    buffer.setHeader(null);
    buffer.setTimeStamp(System.nanoTime());
    buffer.setSequenceNumber(seqNo);
    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME | Buffer.FLAG_LIVE_DATA);
    seqNo++;
  }
 /** Controller Listener. */
 @Override
 public void controllerUpdate(ControllerEvent evt) {
   if (evt instanceof ControllerErrorEvent) {
     System.err.println("Failed to concatenate the files.");
     System.exit(-1);
   } else if (evt instanceof EndOfMediaEvent) {
     evt.getSourceController().close();
   }
 }
Example #5
      public int write(byte[] buffer, int offset, int length, boolean transform) {
        RawPacket pkt = rawPacketArray[0];
        if (pkt == null) pkt = new RawPacket();
        rawPacketArray[0] = pkt;

        byte[] pktBuf = pkt.getBuffer();
        if (pktBuf == null || pktBuf.length < length) {
          pktBuf = new byte[length];
          pkt.setBuffer(pktBuf);
        }
        System.arraycopy(buffer, offset, pktBuf, 0, length);
        pkt.setOffset(0);
        pkt.setLength(length);

        if (transform) {
          PacketTransformer packetTransformer =
              isControlStream ? rtcpPacketTransformer : rtpPacketTransformer;

          if (packetTransformer != null)
            rawPacketArray = packetTransformer.reverseTransform(rawPacketArray);
        }

        SourceTransferHandler transferHandler;
        PushSourceStream pushSourceStream;

        try {
          if (isControlStream) {
            transferHandler = controlTransferHandler;
            pushSourceStream = getControlInputStream();
          } else {
            transferHandler = dataTransferHandler;
            pushSourceStream = getDataInputStream();
          }
        } catch (IOException ioe) {
          throw new UndeclaredThrowableException(ioe);
        }

        for (int i = 0; i < rawPacketArray.length; i++) {
          RawPacket packet = rawPacketArray[i];

          // keep the first element for reuse
          if (i != 0) rawPacketArray[i] = null;

          if (packet != null) {
            if (isControlStream) pendingControlPacket = packet;
            else pendingDataPacket = packet;

            if (transferHandler != null) {
              transferHandler.transferData(pushSourceStream);
            }
          }
        }

        return length;
      }
Example #6
  /** Sample usage for the VideoTransmit class. */
  public static void main(String[] args) {
    // We need three parameters to do the transmission
    // For example,
    //   java VideoTransmit file:/C:/media/test.mov  129.130.131.132 42050

    if (args.length < 3) {
      System.err.println("Usage: VideoTransmit <sourceURL> <destIP> <destPort>");
      System.exit(-1);
    }

    // Create a video transmit object with the specified params.
    VideoTransmit vt = new VideoTransmit(new MediaLocator(args[0]), args[1], args[2]);
    // Start the transmission
    String result = vt.start();

    // result will be non-null if there was an error. The return
    // value is a String describing the possible error. Print it.
    if (result != null) {
      System.err.println("Error : " + result);
      System.exit(0);
    }

    System.err.println("Start transmission for 60 seconds...");

    // Transmit for 60 seconds and then close the processor
    // This is a safeguard when using a capture data source
    // so that the capture device will be properly released
    // before quitting.
    // The right thing to do would be to have a GUI with a
    // "Stop" button that would call stop on VideoTransmit
    try {
      Thread.sleep(60000);
    } catch (InterruptedException ie) {
      // interrupted: just stop transmitting early
    }

    // Stop the transmission
    vt.stop();

    System.err.println("...transmission ended.");

    System.exit(0);
  }
 /** Reads into the given buffer; consumes pushed-back bytes first, otherwise reads from the underlying stream. */
 private int read(byte[] buffer, int offset, int length) throws IOException {
   if (pushbackBufferLen > 0) { // read from pushback buffer
     final int lenToCopy = length < pushbackBufferLen ? length : pushbackBufferLen;
     System.arraycopy(pushbackBuffer, pushbackBufferOffset, buffer, offset, lenToCopy);
     pushbackBufferLen -= lenToCopy;
     pushbackBufferOffset += lenToCopy;
     return lenToCopy;
   } else {
     return stream.read(buffer, offset, length);
   }
 }
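
For context, a minimal sketch of how these two helpers could be combined to peek at a stream header before normal reading resumes. The peekHeader name is hypothetical, and the sketch assumes no bytes are currently pushed back (otherwise the pushed-back header would land behind them):

 /** Hypothetical helper: reads the first headerSize bytes and pushes them back for re-reading. */
 private byte[] peekHeader(int headerSize) throws IOException {
   byte[] header = new byte[headerSize];
   int total = 0;
   while (total < headerSize) {
     // read() drains the pushback buffer first, then falls through to the underlying stream
     int n = read(header, total, headerSize - total);
     if (n <= 0) break; // end of stream
     total += n;
   }
   if (total > 0) pushback(header, total); // make the bytes available to the next read()
   return header;
 }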
Example #8
 /**
  * Implements {@link ActiveSpeakerChangedListener#activeSpeakerChanged(long)}. Notifies this
  * <tt>RecorderRtpImpl</tt> that the audio <tt>ReceiveStream</tt> considered active has changed,
  * and that the new active stream has SSRC <tt>ssrc</tt>.
  *
  * @param ssrc the SSRC of the new active stream.
  */
 @Override
 public void activeSpeakerChanged(long ssrc) {
   if (eventHandler != null) {
     RecorderEvent e = new RecorderEvent();
     e.setAudioSsrc(ssrc);
     // TODO: how do we time this?
     e.setInstant(System.currentTimeMillis());
     e.setType(RecorderEvent.Type.SPEAKER_CHANGED);
     e.setMediaType(MediaType.VIDEO);
     eventHandler.handleEvent(e);
   }
 }
Example #9
  public synchronized void setPacketSize(int newPacketSize) {
    packetSize = newPacketSize;

    sample_count = packetSize;

    if (history == null) {
      history = new byte[packetSize];
      return;
    }

    if (packetSize > history.length) {
      byte[] newHistory = new byte[packetSize];
      System.arraycopy(history, 0, newHistory, 0, historyLength);
      history = newHistory;
    }
  }
Example #10
      @Override
      public int read(byte[] buffer, int offset, int length) throws IOException {

        RawPacket pendingPacket;
        if (isControlStream) {
          pendingPacket = pendingControlPacket;
        } else {
          pendingPacket = pendingDataPacket;
        }
        int bytesToRead = 0;
        byte[] pendingPacketBuffer = pendingPacket.getBuffer();
        if (pendingPacketBuffer != null) {
          int pendingPacketLength = pendingPacket.getLength();
          bytesToRead = length > pendingPacketLength ? pendingPacketLength : length;
          System.arraycopy(
              pendingPacketBuffer, pendingPacket.getOffset(), buffer, offset, bytesToRead);
        }
        return bytesToRead;
      }
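
To show how this read() is typically driven: write(..., transform) above stores the packet as pendingControlPacket/pendingDataPacket and then calls transferHandler.transferData(...). A minimal sketch of such a handler, using the standard JMF types SourceTransferHandler and PushSourceStream; the class name and what happens with the bytes are illustrative assumptions:

  /** Hypothetical consumer invoked via transferHandler.transferData() in write() above. */
  private static class PendingPacketReader implements SourceTransferHandler {
    @Override
    public void transferData(PushSourceStream stream) {
      byte[] buf = new byte[stream.getMinimumTransferSize()];
      try {
        int bytesRead = stream.read(buf, 0, buf.length); // copies the pending RawPacket's payload
        // hand the first bytesRead bytes of buf to RTP/RTCP processing here
      } catch (IOException ioe) {
        // read() above only copies from memory, so this is not expected in practice
      }
    }
  }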
  /** Create a media locator from the given string. */
  @SuppressWarnings("unused")
  static MediaLocator createMediaLocator(String url) {
    MediaLocator ml;

    if (url.indexOf(":") > 0 && (ml = new MediaLocator(url)) != null) {
      return ml;
    }

    if (url.startsWith(File.separator)) {
      if ((ml = new MediaLocator("file:" + url)) != null) {
        return ml;
      }
    } else {
      String file = "file:" + System.getProperty("user.dir") + File.separator + url;
      if ((ml = new MediaLocator(file)) != null) {
        return ml;
      }
    }

    return null;
  }
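
A short usage sketch of the helper above; the method name and paths are made up for illustration:

  /** Hypothetical demo of createMediaLocator. */
  static void demoCreateMediaLocator() {
    // Contains a scheme separator, so the string is used as-is.
    MediaLocator withScheme = createMediaLocator("file:/C:/media/test.mov");
    // Starts with the file separator, so it is prefixed with "file:".
    MediaLocator rooted = createMediaLocator(File.separator + "tmp" + File.separator + "out.wav");
    // Anything else is resolved against the current working directory.
    MediaLocator relative = createMediaLocator("out.wav");
    System.out.println(withScheme + "\n" + rooted + "\n" + relative);
  }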
Example #12
  public static void main(String[] args) {

    // ---------------- CUT HERE START ----------------- //

    Format formats[] = new Format[2];
    formats[0] = new AudioFormat(AudioFormat.IMA4);
    formats[1] = new VideoFormat(VideoFormat.CINEPAK);
    FileTypeDescriptor outputType = new FileTypeDescriptor(FileTypeDescriptor.QUICKTIME);
    Processor p = null;

    try {
      p = Manager.createRealizedProcessor(new ProcessorModel(formats, outputType));
    } catch (IOException e) {
      System.exit(-1);
    } catch (NoProcessorException e) {
      System.exit(-1);
    } catch (CannotRealizeException e) {
      System.exit(-1);
    }
    // get the output of the processor
    DataSource source = p.getDataOutput();
    // create a File protocol MediaLocator with the location of the file to
    // which bits are to be written
    MediaLocator dest = new MediaLocator("file://foo.mov");
    // create a datasink to do the file writing & open the sink to make sure
    // we can write to it.
    DataSink filewriter = null;
    try {
      filewriter = Manager.createDataSink(source, dest);
      filewriter.open();
    } catch (NoDataSinkException e) {
      System.exit(-1);
    } catch (IOException e) {
      System.exit(-1);
    } catch (SecurityException e) {
      System.exit(-1);
    }
    // now start the filewriter and processor
    try {
      filewriter.start();
    } catch (IOException e) {
      System.exit(-1);
    }
    p.start();
    // stop and close the processor when done capturing...
    // close the datasink when EndOfStream event is received...

    // ----------------- CUT HERE END ---------------- //
    try {
      Thread.sleep(4000);
    } catch (InterruptedException ie) {
    }
    p.stop();
    p.close();
    try {
      Thread.sleep(1000);
    } catch (InterruptedException ie) {
    }
    filewriter.close();
    try {
      Thread.sleep(4000);
    } catch (InterruptedException ie) {
    }

    System.exit(0);
  }
 static void prUsage() {
   System.err.println("Usage: java Concat -o <output> <input> ...");
   System.err.println("     <output>: output URL or file name");
   System.err.println("     <input>: input URL or file name");
   System.exit(0);
 }
Example #14
  /** {@inheritDoc} */
  @Override
  protected int doProcess(Buffer inBuffer, Buffer outBuffer) {
    byte[] inData = (byte[]) inBuffer.getData();
    int inOffset = inBuffer.getOffset();

    if (!VP8PayloadDescriptor.isValid(inData, inOffset)) {
      logger.warn("Invalid RTP/VP8 packet discarded.");
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK?
    }

    long inSeq = inBuffer.getSequenceNumber();
    long inRtpTimestamp = inBuffer.getRtpTimeStamp();
    int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset);
    boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
    boolean inIsStartOfFrame = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset);
    int inLength = inBuffer.getLength();
    int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset);
    int inPayloadLength = inLength - inPdSize;

    if (empty && lastSentSeq != -1 && seqNumComparator.compare(inSeq, lastSentSeq) != 1) {
      if (logger.isInfoEnabled()) logger.info("Discarding old packet (while empty) " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    if (!empty) {
      // if the incoming packet has a different PictureID or timestamp
      // than those of the current frame, then it belongs to a different
      // frame.
      if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId)
          || (timestamp != -1 && inRtpTimestamp != -1 && inRtpTimestamp != timestamp)) {
        if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq
        {
          // the packet belongs to a previous frame. discard it
          if (logger.isInfoEnabled()) logger.info("Discarding old packet " + inSeq);
          outBuffer.setDiscard(true);
          return BUFFER_PROCESSED_OK;
        } else // inSeq > firstSeq (and also presumably inSeq > lastSeq)
        {
          // the packet belongs to a subsequent frame (to the one
          // currently being held). Drop the current frame.

          if (logger.isInfoEnabled())
            logger.info(
                "Discarding saved packets on arrival of"
                    + " a packet for a subsequent frame: "
                    + inSeq);

          // TODO: this would be the place to complain about the
          // not-well-received PictureID by sending an RTCP SLI or NACK.
          reinit();
        }
      }
    }

    // a whole frame in a single packet. avoid the extra copy to
    // this.data and output it immediately.
    if (empty && inMarker && inIsStartOfFrame) {
      byte[] outData = validateByteArraySize(outBuffer, inPayloadLength, false);
      System.arraycopy(inData, inOffset + inPdSize, outData, 0, inPayloadLength);
      outBuffer.setOffset(0);
      outBuffer.setLength(inPayloadLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);

      lastSentSeq = inSeq;

      return BUFFER_PROCESSED_OK;
    }

    // add to this.data
    Container container = free.poll();
    if (container == null) container = new Container();
    if (container.buf == null || container.buf.length < inPayloadLength)
      container.buf = new byte[inPayloadLength];

    if (data.get(inSeq) != null) {
      if (logger.isInfoEnabled())
        logger.info("(Probable) duplicate packet detected, discarding " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    System.arraycopy(inData, inOffset + inPdSize, container.buf, 0, inPayloadLength);
    container.len = inPayloadLength;
    data.put(inSeq, container);

    // update fields
    frameLength += inPayloadLength;
    if (firstSeq == -1 || (seqNumComparator.compare(firstSeq, inSeq) == 1)) firstSeq = inSeq;
    if (lastSeq == -1 || (seqNumComparator.compare(inSeq, lastSeq) == 1)) lastSeq = inSeq;

    if (empty) {
      // the first received packet for the current frame was just added
      empty = false;
      timestamp = inRtpTimestamp;
      pictureId = inPictureId;
    }

    if (inMarker) haveEnd = true;
    if (inIsStartOfFrame) haveStart = true;

    // check if we have a full frame
    if (frameComplete()) {
      byte[] outData = validateByteArraySize(outBuffer, frameLength, false);
      int ptr = 0;
      Container b;
      for (Map.Entry<Long, Container> entry : data.entrySet()) {
        b = entry.getValue();
        System.arraycopy(b.buf, 0, outData, ptr, b.len);
        ptr += b.len;
      }

      outBuffer.setOffset(0);
      outBuffer.setLength(frameLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);
      lastSentSeq = lastSeq;

      // prepare for the next frame
      reinit();

      return BUFFER_PROCESSED_OK;
    } else {
      // frame not complete yet
      outBuffer.setDiscard(true);
      return OUTPUT_BUFFER_NOT_FILLED;
    }
  }
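
The depacketizer above delegates the "do we have the whole frame yet?" decision to frameComplete(), whose body is not shown here. A plausible sketch, assuming the fields used in doProcess (haveStart, haveEnd, data ordered by sequence number) and ignoring 16-bit sequence-number wrap-around:

  /** Sketch only: complete when the first and last packets are present and there are no gaps. */
  private boolean frameCompleteSketch() {
    if (!haveStart || !haveEnd) return false;

    Long previousSeq = null;
    for (Long seq : data.keySet()) {
      // data is assumed to iterate in sequence-number order (wrap-around ignored for brevity)
      if (previousSeq != null && seq != previousSeq + 1) return false; // a packet is still missing
      previousSeq = seq;
    }
    return true;
  }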
Example #15
  /**
   * Implements {@link ControllerListener#controllerUpdate(ControllerEvent)}. Handles events from
   * the <tt>Processor</tt>s that this instance uses to transcode media.
   *
   * @param ev the event to handle.
   */
  public void controllerUpdate(ControllerEvent ev) {
    if (ev == null || ev.getSourceController() == null) {
      return;
    }

    Processor processor = (Processor) ev.getSourceController();
    ReceiveStreamDesc desc = findReceiveStream(processor);

    if (desc == null) {
      logger.warn("Event from an orphaned processor, ignoring: " + ev);
      return;
    }

    if (ev instanceof ConfigureCompleteEvent) {
      if (logger.isInfoEnabled()) {
        logger.info(
            "Configured processor for ReceiveStream ssrc="
                + desc.ssrc
                + " ("
                + desc.format
                + ")"
                + " "
                + System.currentTimeMillis());
      }

      boolean audio = desc.format instanceof AudioFormat;

      if (audio) {
        ContentDescriptor cd = processor.setContentDescriptor(AUDIO_CONTENT_DESCRIPTOR);
        if (!AUDIO_CONTENT_DESCRIPTOR.equals(cd)) {
          logger.error(
              "Failed to set the Processor content "
                  + "descriptor to "
                  + AUDIO_CONTENT_DESCRIPTOR
                  + ". Actual result: "
                  + cd);
          removeReceiveStream(desc, false);
          return;
        }
      }

      for (TrackControl track : processor.getTrackControls()) {
        Format trackFormat = track.getFormat();

        if (audio) {
          final long ssrc = desc.ssrc;
          SilenceEffect silenceEffect;
          if (Constants.OPUS_RTP.equals(desc.format.getEncoding())) {
            silenceEffect = new SilenceEffect(48000);
          } else {
            // We haven't tested that the RTP timestamps survive
            // the journey through the chain when codecs other than
            // opus are in use, so for the moment we rely on FMJ's
            // timestamps for non-opus formats.
            silenceEffect = new SilenceEffect();
          }

          silenceEffect.setListener(
              new SilenceEffect.Listener() {
                boolean first = true;

                @Override
                public void onSilenceNotInserted(long timestamp) {
                  if (first) {
                    first = false;
                    // send event only
                    audioRecordingStarted(ssrc, timestamp);
                  } else {
                    // change file and send event
                    resetRecording(ssrc, timestamp);
                  }
                }
              });
          desc.silenceEffect = silenceEffect;
          AudioLevelEffect audioLevelEffect = new AudioLevelEffect();
          audioLevelEffect.setAudioLevelListener(
              new SimpleAudioLevelListener() {
                @Override
                public void audioLevelChanged(int level) {
                  activeSpeakerDetector.levelChanged(ssrc, level);
                }
              });

          try {
            // We add an effect, which will insert "silence" in
            // place of lost packets.
            track.setCodecChain(new Codec[] {silenceEffect, audioLevelEffect});
          } catch (UnsupportedPlugInException upie) {
            logger.warn("Failed to insert silence effect: " + upie);
            // But do go on, a recording without extra silence is
            // better than nothing ;)
          }
        } else {
          // transcode vp8/rtp to vp8 (i.e. depacketize vp8)
          if (trackFormat.matches(vp8RtpFormat)) track.setFormat(vp8Format);
          else {
            logger.error("Unsupported track format: " + trackFormat + " for ssrc=" + desc.ssrc);
            // we currently only support vp8
            removeReceiveStream(desc, false);
            return;
          }
        }
      }

      processor.realize();
    } else if (ev instanceof RealizeCompleteEvent) {
      desc.dataSource = processor.getDataOutput();

      long ssrc = desc.ssrc;
      boolean audio = desc.format instanceof AudioFormat;
      String suffix = audio ? AUDIO_FILENAME_SUFFIX : VIDEO_FILENAME_SUFFIX;

      // XXX '\' on windows?
      String filename = getNextFilename(path + "/" + ssrc, suffix);
      desc.filename = filename;

      DataSink dataSink;
      if (audio) {
        try {
          dataSink = Manager.createDataSink(desc.dataSource, new MediaLocator("file:" + filename));
        } catch (NoDataSinkException ndse) {
          logger.error("Could not create DataSink: " + ndse);
          removeReceiveStream(desc, false);
          return;
        }

      } else {
        dataSink = new WebmDataSink(filename, desc.dataSource);
      }

      if (logger.isInfoEnabled())
        logger.info(
            "Created DataSink ("
                + dataSink
                + ") for SSRC="
                + ssrc
                + ". Output filename: "
                + filename);
      try {
        dataSink.open();
      } catch (IOException e) {
        logger.error("Failed to open DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ": " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (!audio) {
        final WebmDataSink webmDataSink = (WebmDataSink) dataSink;
        webmDataSink.setSsrc(ssrc);
        webmDataSink.setEventHandler(eventHandler);
        webmDataSink.setKeyFrameControl(
            new KeyFrameControlAdapter() {
              @Override
              public boolean requestKeyFrame(boolean urgent) {
                return requestFIR(webmDataSink);
              }
            });
      }

      try {
        dataSink.start();
      } catch (IOException e) {
        logger.error(
            "Failed to start DataSink (" + dataSink + ") for" + " SSRC=" + ssrc + ". " + e);
        removeReceiveStream(desc, false);
        return;
      }

      if (logger.isInfoEnabled()) logger.info("Started DataSink for SSRC=" + ssrc);

      desc.dataSink = dataSink;

      processor.start();
    } else if (logger.isDebugEnabled()) {
      logger.debug(
          "Unhandled ControllerEvent from the Processor for ssrc=" + desc.ssrc + ": " + ev);
    }
  }
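
For reference, the ConfigureCompleteEvent and RealizeCompleteEvent handled above arrive only after a Processor has been created and asked to configure itself. A minimal sketch of that setup, using only standard JMF calls; the method name and how the DataSource is obtained are assumptions:

  /** Hypothetical setup that triggers the configure -> realize -> start sequence handled above. */
  private void startProcessorSketch(DataSource receiveStreamDataSource) throws IOException {
    try {
      Processor processor = Manager.createProcessor(receiveStreamDataSource);
      // Events (ConfigureCompleteEvent, RealizeCompleteEvent, ...) are delivered to controllerUpdate().
      processor.addControllerListener(this);
      processor.configure(); // asynchronous; completion is reported as a ConfigureCompleteEvent
    } catch (NoProcessorException npe) {
      logger.error("Could not create a Processor: " + npe);
    }
  }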
    @Override
    public void keyPressed(KeyEvent arg0) {
      if (arg0.getKeyCode() == KeyEvent.VK_C) // C
      {
        // Are we allowed to capture a new image?
        if (framenr - lastcapture_framenr > number_of_frames_betweencaptures) {
          captureImage();
          cw.setVisible(false);
          if (timer != null) timer.cancel();

          cwText.setText(""); // Empty text
          lastcapture_framenr = framenr;
          cwTimer.cancel();

          if (number_of_second_showcapturetext > 0) {
            cwText.setText("Bildet ble lagret ...");

            TimerTask task =
                new TimerTask() {

                  @Override
                  public void run() {
                    EventQueue.invokeLater(
                        new Runnable() {
                          public void run() {
                            cwText.setText(""); // Empty text
                          }
                        });
                  }
                };
            timer = new Timer();
            timer.schedule(task, (int) (number_of_second_showcapturetext * 1000));
          }
        } else {
          // Console debug:
          // System.out.println("At framenr " + framenr + ", " + (framenr-lastcapture_framenr) +
          //		"frames has passed, should be " +number_of_frames_betweencaptures);
          // System.out.println("You must wait an other "+
          //
          //	(int)Math.ceil(((double)number_of_frames_betweencaptures-(double)(framenr-lastcapture_framenr))/fps) +
          //		" seconds");

          TimerTask task =
              new TimerTask() {

                boolean finished = false;

                @Override
                public void run() {
                  EventQueue.invokeLater(
                      new Runnable() {
                        public void run() {
                          if (!finished) {
                            if ((int)
                                    Math.ceil(
                                        ((double) number_of_frames_betweencaptures
                                                - (double) (framenr - lastcapture_framenr))
                                            / fps)
                                > 0) {
                              cwText.setText(
                                  "Du må vente "
                                      + (int)
                                          Math.ceil(
                                              ((double) number_of_frames_betweencaptures
                                                      - (double) (framenr - lastcapture_framenr))
                                                  / fps)
                                      + " sekunder før nytt bilde");
                            } else {
                              finished = true;
                              cwText.setText("Du kan nå ta nytt bilde");
                            }
                          }
                        }
                      });
                }
              };
          timer = new Timer();
          timer.schedule(task, 0, (1000 / fps)); // Update for every frame
        }
      } else if (arg0.getKeyCode() == KeyEvent.VK_ESCAPE) // Escape
      {
        System.out.println("Escape pressed, exiting");
        System.exit(0);
      }
    }
  public int process(Buffer inputBuffer, Buffer outputBuffer) {

    if (pendingFrames > 0) {
      // System.out.println("packetizing");
      return BUFFER_PROCESSED_OK;
    }

    if (!checkInputBuffer(inputBuffer)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(inputBuffer)) {
      propagateEOM(outputBuffer);
      return BUFFER_PROCESSED_OK;
    }

    int inpOffset = inputBuffer.getOffset();
    int inpLength = inputBuffer.getLength();
    int outLength = 0;
    int outOffset = 0;
    byte[] inpData = (byte[]) inputBuffer.getData();
    byte[] outData =
        validateByteArraySize(outputBuffer, calculateOutputSize(inpData.length + historySize));
    int historyLength = history.getLength();
    byte[] historyData = validateByteArraySize(history, historySize);
    int framesNumber = calculateFramesNumber(inpData.length + historySize);

    if ((regions == null) || (regions.length < framesNumber + 1))
      regions = new int[framesNumber + 1];

    if ((regionsTypes == null) || (regionsTypes.length < framesNumber))
      regionsTypes = new int[framesNumber];

    if (historyLength != 0) {
      int bytesToCopy = (historyData.length - historyLength);
      if (bytesToCopy > inpLength) {
        bytesToCopy = inpLength;
      }

      System.arraycopy(inpData, inpOffset, historyData, historyLength, bytesToCopy);

      codecProcess(
          historyData,
          0,
          outData,
          outOffset,
          historyLength + bytesToCopy,
          readBytes,
          writeBytes,
          frameNumber,
          regions,
          regionsTypes);

      if (readBytes[0] <= 0) {
        if (writeBytes[0] <= 0) {
          // System.err.println("Returning output buffer not filled");
          return OUTPUT_BUFFER_NOT_FILLED;
        } else {
          updateOutput(outputBuffer, outputFormat, writeBytes[0], 0);
          // System.err.println("Returning OK");
          return BUFFER_PROCESSED_OK;
        }
      }

      // System.out.println("1: "+inpLength+" "+readBytes[0]+" "+writeBytes[0]);

      outOffset += writeBytes[0];
      outLength += writeBytes[0];

      inpOffset += (readBytes[0] - historyLength);
      inpLength += (historyLength - readBytes[0]);
    }

    codecProcess(
        inpData,
        inpOffset,
        outData,
        outOffset,
        inpLength,
        readBytes,
        writeBytes,
        frameNumber,
        regions,
        regionsTypes);
    // System.out.println("2: "+inpLength+" "+readBytes[0]+" "+writeBytes[0]);

    // debug
    // for (int i=0; i<frameNumber[0];i++ ) {
    // System.out.println(i+" "+regions[i]+" - "+regions[i+1]+" type "+regionsTypes[i]);
    // }

    outLength += writeBytes[0];

    inpOffset += readBytes[0];
    inpLength -= readBytes[0];

    System.arraycopy(inpData, inpOffset, historyData, 0, inpLength);
    history.setLength(inpLength);

    updateOutput(outputBuffer, outputFormat, outLength, 0);

    return BUFFER_PROCESSED_OK;
  }
  @Override
  public void keyPressed(KeyEvent e) {
    if (e.getKeyCode() == KeyEvent.VK_C) // C
    {
      if (captureWindow) {
        this.openCaptureWindow();
      } else {
        this.captureImage();
      }
    } else if (e.getKeyCode() == KeyEvent.VK_ESCAPE) // Escape
    {
      System.out.println("Escape pressed, exiting");
      System.exit(0);
    } else if (e.getKeyCode() == KeyEvent.VK_T) // T
    {
      // Testing purpose
      for (int i = 0; i < images.size(); i++) {
        System.out.println("image " + i + ", used? " + images_used.contains((Integer) i));
      }

    } else if (e.getKeyCode() == KeyEvent.VK_Y) // Y
    {
      // Testing purpose
      System.out.println("getRandomImageNum() = " + getRandomImageNum());
    } else if (e.getKeyCode() == KeyEvent.VK_I) // I
    {
      // Testing purpose
      System.out.println("LAST ADDED");
      for (int i = 0; i < images_lastadded.size(); i++) {
        System.out.println(
            i
                + " - image "
                + images_lastadded.get(i)
                + ", used? "
                + images_used.contains((Integer) images_lastadded.get(i)));
      }
    } else if (e.getKeyCode() == KeyEvent.VK_U) // U
    {
      // Testing purpose
      for (int i = 0; i < imagepanels.length; i++) {
        for (int j = 0; j < imagepanels[i].imagenum_now.length; j++) {
          for (int j2 = 0; j2 < imagepanels[i].imagenum_now[j].length; j2++) {
            String print1;
            if (imagepanels[i].imagenum_now[j][j2] < 10)
              print1 = "  " + imagepanels[i].imagenum_now[j][j2];
            else if (imagepanels[i].imagenum_now[j][j2] < 100)
              print1 = " " + imagepanels[i].imagenum_now[j][j2];
            else print1 = "" + imagepanels[i].imagenum_now[j][j2];
            String print2;
            if (imagepanels[i].imagenum_next[j][j2] < 10)
              print2 = "  " + imagepanels[i].imagenum_next[j][j2];
            else if (imagepanels[i].imagenum_next[j][j2] < 100)
              print2 = " " + imagepanels[i].imagenum_next[j][j2];
            else print2 = "" + imagepanels[i].imagenum_next[j][j2];

            System.out.println(
                "imagepanels["
                    + i
                    + "]."
                    + "imagenum_now["
                    + j
                    + "]["
                    + j2
                    + "] = "
                    + print1
                    + ", next = "
                    + print2);
          }
        }
      }
    } else {
      displayInfo(e, "KEY TYPED: ");
    }
  }
  public WebcamCaptureAndFadePanel(String saveDir, String layout) {

    System.out.println("Using " + saveDir + " as directory for the images.");
    saveDirectory = saveDir;

    getImages();
    images_used = new ArrayList<Integer>();
    images_lastadded = new ArrayList<Integer>();
    images_nevershown = new ArrayList<Integer>();

    Vector devices = (Vector) CaptureDeviceManager.getDeviceList(null).clone();
    Enumeration enumeration = devices.elements();
    System.out.println("- Available cameras -");
    ArrayList<String> names = new ArrayList<String>();
    while (enumeration.hasMoreElements()) {
      CaptureDeviceInfo cdi = (CaptureDeviceInfo) enumeration.nextElement();
      String name = cdi.getName();
      if (name.startsWith("vfw:")) {
        names.add(name);
        System.out.println(name);
      }
    }

    // String str1 = "vfw:Logitech USB Video Camera:0";
    // String str2 = "vfw:Microsoft WDM Image Capture (Win32):0";
    if (names.size() == 0) {
      JOptionPane.showMessageDialog(
          null,
          "Ingen kamera funnet. " + "Du må koble til et kamera for å kjøre programmet.",
          "Feil",
          JOptionPane.ERROR_MESSAGE);
      System.exit(0);
    } else if (names.size() > 1) {

      JOptionPane.showMessageDialog(
          null,
          "Fant mer enn 1 kamera. " + "Velger da:\n" + names.get(0),
          "Advarsel",
          JOptionPane.WARNING_MESSAGE);
    }

    String str2 = names.get(0);
    di = CaptureDeviceManager.getDevice(str2);
    ml = di.getLocator();

    try {
      player = Manager.createRealizedPlayer(ml);
      formatControl = (FormatControl) player.getControl("javax.media.control.FormatControl");

      /*
      Format[] formats = formatControl.getSupportedFormats();
      for (int i=0; i<formats.length; i++)
      	System.out.println(formats[i].toString());
      */

      player.start();
    } catch (javax.media.NoPlayerException e) {
      JOptionPane.showMessageDialog(
          null,
          "Klarer ikke å starte" + " programmet pga. feil med kamera. Sjekk at det er koblet til.",
          "IOException",
          JOptionPane.ERROR_MESSAGE);
      System.exit(0);
    } catch (Exception e) {
      e.printStackTrace();
      System.exit(0);
    }

    /*
     * Layout
     *
     * Add
     * - comp
     * - imagepanels
     */

    if (layout.equals("1024v2")) {
      layout1024v2();
    } else if (layout.equals("1280")) {
      layout1280();
    } else {
      layout1024();
    }

    // Capture Window
    if (captureWindow) {
      cw = new JFrame("Capture from webcam");
      cw.setAlwaysOnTop(true);
      cw.setSize(sizeCaptureWindow_x, sizeCaptureWindow_y);
      cw.addKeyListener(new captureWindowKeyListner());
      cw.setUndecorated(true);

      // Add webcam
      if ((comp = player.getVisualComponent()) != null) {
        cw.add(comp);
      }

      // Add panel to window and set location of window
      cw.setLocation(cwLocation_x, cwLocation_y);
    }

    // Text window
    cwText = new rotatedText("");

    /*
     * Timer for update
     */
    Timer thread = new Timer();
    thread.schedule(new frameUpdateTask(), 0, (1000 / fps));
  }