public void captureImage() {
    // Use File.separator rather than a hard-coded "\\" so the path also works outside Windows.
    String savepath =
        this.saveDirectory + File.separator + "cam" + this.getDateFormatNow("yyyyMMdd_HHmmss-S") + ".jpg";
    System.out.println("Capturing current image to " + savepath);

    // Grab a frame
    FrameGrabbingControl fgc =
        (FrameGrabbingControl) player.getControl("javax.media.control.FrameGrabbingControl");
    buf = fgc.grabFrame();

    // Convert it to an image
    btoi = new BufferToImage((VideoFormat) buf.getFormat());
    img = btoi.createImage(buf);

    // save image
    saveJPG(img, savepath);

    // show the image
    // imgpanel.setImage(img);

    // images.add(img);
    images.add(savepath);

    if (images_lastadded.size() >= lastadded_max) {
      // Drop the oldest entry (kept at the end of the list) so the list
      // holds at most lastadded_max images
      images_lastadded.remove(images_lastadded.size() - 1);
    }

    // Record the index of the new image at the front of both tracking lists
    images_lastadded.add(0, images.size() - 1);
    images_nevershown.add(0, images.size() - 1);

    forceNewImage();
  }
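
saveJPG is called above but not shown in this snippet. A minimal sketch of what such a helper might look like with javax.imageio, assuming the captured Image can be drawn into a BufferedImage (the body here is an assumption, not the original implementation):

  private void saveJPG(java.awt.Image img, String path) {
    // Render the AWT Image into a BufferedImage so ImageIO can encode it.
    java.awt.image.BufferedImage bi =
        new java.awt.image.BufferedImage(
            img.getWidth(null), img.getHeight(null), java.awt.image.BufferedImage.TYPE_INT_RGB);
    java.awt.Graphics2D g = bi.createGraphics();
    g.drawImage(img, 0, 0, null);
    g.dispose();
    try {
      javax.imageio.ImageIO.write(bi, "jpg", new java.io.File(path));
    } catch (java.io.IOException e) {
      System.err.println("Failed to save " + path + ": " + e);
    }
  }
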
    public VideoTrack(PullSourceStream stream) throws ResourceUnavailableException {
      super();

      this.stream = stream;

      // Read and decode the first frame to determine the stream's format.
      final Buffer buffer = new Buffer();
      readFrame(buffer);
      if (buffer.isDiscard() || buffer.isEOM())
        throw new ResourceUnavailableException("Unable to read first frame");
      // TODO: catch runtime exception too?

      // parse jpeg
      final java.awt.Image image;
      try {
        image =
            ImageIO.read(
                new ByteArrayInputStream(
                    (byte[]) buffer.getData(), buffer.getOffset(), buffer.getLength()));
      } catch (IOException e) {
        logger.log(Level.WARNING, "" + e, e);
        throw new ResourceUnavailableException("Error reading image: " + e);
      }

      if (image == null) {
        logger.log(Level.WARNING, "Failed to read image (ImageIO.read returned null).");
        throw new ResourceUnavailableException("Failed to read image (ImageIO.read returned null)");
      }

      if (frameContentType.equals("image/jpeg"))
        format =
            new JPEGFormat(
                new Dimension(image.getWidth(null), image.getHeight(null)),
                Format.NOT_SPECIFIED,
                Format.byteArray,
                -1.f,
                Format.NOT_SPECIFIED,
                Format.NOT_SPECIFIED);
      else if (frameContentType.equals("image/gif"))
        format =
            new GIFFormat(
                new Dimension(image.getWidth(null), image.getHeight(null)),
                Format.NOT_SPECIFIED,
                Format.byteArray,
                -1.f);
      else if (frameContentType.equals("image/png"))
        format =
            new PNGFormat(
                new Dimension(image.getWidth(null), image.getHeight(null)),
                Format.NOT_SPECIFIED,
                Format.byteArray,
                -1.f);
      else
        throw new ResourceUnavailableException(
            "Unsupported frame content type: " + frameContentType);
      // TODO: this discards the first image; save it and return it the first
      // time readFrame is called.
    }
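
    // One way to address the TODO above would be to keep the probe Buffer read in
    // the constructor and replay it on the first readFrame() call instead of
    // discarding it. Illustrative sketch only (the field name is not from the
    // original):
    //
    //   private Buffer cachedFirstFrame;   // assigned from 'buffer' at the end of the constructor
    //
    //   public void readFrame(Buffer buffer) {
    //     if (cachedFirstFrame != null) {
    //       buffer.setData(cachedFirstFrame.getData());
    //       buffer.setOffset(cachedFirstFrame.getOffset());
    //       buffer.setLength(cachedFirstFrame.getLength());
    //       cachedFirstFrame = null;
    //       return;
    //     }
    //     ... // fall through to the existing parsing logic
    //   }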
    @Override
    public void readFrame(Buffer buffer) {
      // example data:
      // --ssBoundary8345
      // Content-Type: image/jpeg
      // Content-Length: 114587

      try {
        String line;
        // eat leading blank lines
        while (true) {
          line = readLine(MAX_LINE_LENGTH);
          if (line == null) {
            buffer.setEOM(true);
            buffer.setLength(0);
            return;
          }

          if (!line.trim().equals("")) break; // found a non-blank line
        }

        if (boundary == null) {
          // TODO: we should be able to get this from the content type, but the
          // content type has the boundary stripped out, so we just take the
          // first non-blank line to be the boundary.
          boundary = line.trim();
          // System.out.println("boundary: " + boundary);
        } else {
          if (!line.trim().equals(boundary)) {
            // throw new IOException("Expected boundary: " + toPrintable(line));
            // TODO: why do we seem to get these when playing back mmr files
            // recorded using FmjTranscode?
            logger.warning("Expected boundary (frame " + framesRead + "): " + toPrintable(line));

            // handle streams that are truncated in the middle of a frame:
            final int eatResult = eatUntil(boundary); // TODO: no need to store the data

            logger.info(
                "Ignored bytes (eom after="
                    + (eatResult < 0)
                    + "): "
                    + (eatResult < 0 ? (-1 * eatResult - 1) : eatResult));
            if (eatResult < 0) {
              buffer.setEOM(true);
              buffer.setLength(0);
              return;
            }

            // now read the boundary line itself
            line = readLine(MAX_LINE_LENGTH);
            if (line == null) {
              buffer.setEOM(true);
              buffer.setLength(0);
              return;
            }
            if (!line.trim().equals(boundary)) {
              // should never happen: eatUntil(boundary) just found the boundary
              throw new RuntimeException("No boundary found after eatUntil(boundary)");
            }
          }
        }

        final Properties properties = new Properties();

        while (true) {
          line = readLine(MAX_LINE_LENGTH);
          if (line == null) {
            buffer.setEOM(true);
            buffer.setLength(0);
            return;
          }

          if (line.trim().equals("")) break; // end of header

          if (!parseProperty(line, properties))
            throw new IOException("Expected property: " + toPrintable(line));
        }

        final String contentType = properties.getProperty("Content-Type".toUpperCase());
        if (contentType == null) {
          logger.warning("Header properties: " + properties);
          throw new IOException("Expected Content-Type in header");
        }

        // check supported content types:
        if (!isSupportedFrameContentType(contentType)) {
          throw new IOException("Unsupported Content-Type: " + contentType);
        }

        if (frameContentType == null) {
          frameContentType = contentType;
        } else {
          if (!contentType.equals(frameContentType))
            throw new IOException(
                "Content type changed during stream from "
                    + frameContentType
                    + " to "
                    + contentType);
        }

        // TODO: check that size doesn't change throughout

        final byte[] data;

        final String contentLenStr = properties.getProperty("Content-Length".toUpperCase());
        if (contentLenStr != null) { // if we know the content length, use it
          final int contentLen;
          try {
            contentLen = Integer.parseInt(contentLenStr);
          } catch (NumberFormatException e) {
            throw new IOException("Invalid content length: " + contentLenStr);
          }

          // now, read the content-length bytes
          data = readFully(contentLen); // TODO: don't realloc each time
        } else {
          // if we don't know the content length, just read until we
          // find the boundary.
          // Some IP cameras don't specify it, like
          // http://webcam-1.duesseldorf.it-on.net/cgi-bin/nph-update.cgi
          data = readUntil(boundary);
        }

        // extension: optional timestamp header (TIMESTAMP_KEY)
        final String timestampStr = properties.getProperty(TIMESTAMP_KEY.toUpperCase());
        if (timestampStr != null) {
          try {
            final long timestamp = Long.parseLong(timestampStr);
            buffer.setTimeStamp(timestamp);

          } catch (NumberFormatException e) {
            logger.log(Level.WARNING, "" + e, e);
          }
        }

        if (data == null) {
          buffer.setEOM(true);
          buffer.setLength(0);
          return;
        }

        buffer.setData(data);
        buffer.setOffset(0);
        buffer.setLength(data.length);
        ++framesRead;

      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
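
readLine and parseProperty are not shown in these snippets. Judging from the lookups above (properties keyed by the upper-cased header name), parseProperty presumably splits a "Key: value" header line; a rough sketch under that assumption, not the original code:

  private boolean parseProperty(String line, Properties properties) {
    // Header lines look like "Content-Type: image/jpeg".
    final int colon = line.indexOf(':');
    if (colon < 0) return false; // not a "Key: value" line
    final String key = line.substring(0, colon).trim().toUpperCase();
    final String value = line.substring(colon + 1).trim();
    properties.setProperty(key, value);
    return true;
  }
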
Example #4
  /**
   * Blocks and reads into a <tt>Buffer</tt> from this <tt>PullBufferStream</tt>.
   *
   * @param buffer the <tt>Buffer</tt> this <tt>PullBufferStream</tt> is to read into
   * @throws IOException if an I/O error occurs while this <tt>PullBufferStream</tt> reads into the
   *     specified <tt>Buffer</tt>
   * @see AbstractVideoPullBufferStream#doRead(Buffer)
   */
  @Override
  protected void doRead(Buffer buffer) throws IOException {
    /*
     * Determine the Format in which we're expected to output. We cannot rely
     * on the Format always being specified in the Buffer because that is not
     * its responsibility; the DataSource of this ImageStream knows the output
     * Format.
     */
    Format format = buffer.getFormat();

    if (format == null) {
      format = getFormat();
      if (format != null) buffer.setFormat(format);
    }

    if (format instanceof AVFrameFormat) {
      Object o = buffer.getData();
      AVFrame frame;

      if (o instanceof AVFrame) frame = (AVFrame) o;
      else {
        frame = new AVFrame();
        buffer.setData(frame);
      }

      AVFrameFormat avFrameFormat = (AVFrameFormat) format;
      Dimension size = avFrameFormat.getSize();
      ByteBuffer data = readScreenNative(size);

      if (data != null) {
        if (frame.avpicture_fill(data, avFrameFormat) < 0) {
          data.free();
          throw new IOException("avpicture_fill");
        }
      } else {
        /*
         * This can happen when we disconnect a monitor from the computer
         * before or during grabbing.
         */
        throw new IOException("Failed to grab screen.");
      }
    } else {
      byte[] bytes = (byte[]) buffer.getData();
      Dimension size = ((VideoFormat) format).getSize();

      bytes = readScreen(bytes, size);

      buffer.setData(bytes);
      buffer.setOffset(0);
      buffer.setLength(bytes.length);
    }

    buffer.setHeader(null);
    buffer.setTimeStamp(System.nanoTime());
    buffer.setSequenceNumber(seqNo);
    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME | Buffer.FLAG_LIVE_DATA);
    seqNo++;
  }
Example #5
  /** {@inheritDoc} */
  @Override
  protected int doProcess(Buffer inBuffer, Buffer outBuffer) {
    byte[] inData = (byte[]) inBuffer.getData();
    int inOffset = inBuffer.getOffset();

    if (!VP8PayloadDescriptor.isValid(inData, inOffset)) {
      logger.warn("Invalid RTP/VP8 packet discarded.");
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_FAILED; // XXX: FAILED or OK?
    }

    long inSeq = inBuffer.getSequenceNumber();
    long inRtpTimestamp = inBuffer.getRtpTimeStamp();
    int inPictureId = VP8PayloadDescriptor.getPictureId(inData, inOffset);
    boolean inMarker = (inBuffer.getFlags() & Buffer.FLAG_RTP_MARKER) != 0;
    boolean inIsStartOfFrame = VP8PayloadDescriptor.isStartOfFrame(inData, inOffset);
    int inLength = inBuffer.getLength();
    int inPdSize = VP8PayloadDescriptor.getSize(inData, inOffset);
    int inPayloadLength = inLength - inPdSize;

    if (empty && lastSentSeq != -1 && seqNumComparator.compare(inSeq, lastSentSeq) != 1) {
      if (logger.isInfoEnabled()) logger.info("Discarding old packet (while empty) " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    if (!empty) {
      // if the incoming packet has a different PictureID or timestamp
      // than those of the current frame, then it belongs to a different
      // frame.
      if ((inPictureId != -1 && pictureId != -1 && inPictureId != pictureId)
          || (timestamp != -1 && inRtpTimestamp != -1 && inRtpTimestamp != timestamp)) {
        if (seqNumComparator.compare(inSeq, firstSeq) != 1) // inSeq <= firstSeq
        {
          // the packet belongs to a previous frame. discard it
          if (logger.isInfoEnabled()) logger.info("Discarding old packet " + inSeq);
          outBuffer.setDiscard(true);
          return BUFFER_PROCESSED_OK;
        } else // inSeq > firstSeq (and also presumably inSeq > lastSeq)
        {
          // the packet belongs to a subsequent frame (to the one
          // currently being held). Drop the current frame.

          if (logger.isInfoEnabled())
            logger.info(
                "Discarding saved packets on arrival of"
                    + " a packet for a subsequent frame: "
                    + inSeq);

          // TODO: this would be the place to complain about the
          // not-well-received PictureID by sending a RTCP SLI or NACK.
          reinit();
        }
      }
    }

    // a whole frame in a single packet. avoid the extra copy to
    // this.data and output it immediately.
    if (empty && inMarker && inIsStartOfFrame) {
      byte[] outData = validateByteArraySize(outBuffer, inPayloadLength, false);
      System.arraycopy(inData, inOffset + inPdSize, outData, 0, inPayloadLength);
      outBuffer.setOffset(0);
      outBuffer.setLength(inPayloadLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);

      lastSentSeq = inSeq;

      return BUFFER_PROCESSED_OK;
    }

    // add to this.data
    Container container = free.poll();
    if (container == null) container = new Container();
    if (container.buf == null || container.buf.length < inPayloadLength)
      container.buf = new byte[inPayloadLength];

    if (data.get(inSeq) != null) {
      if (logger.isInfoEnabled())
        logger.info("(Probable) duplicate packet detected, discarding " + inSeq);
      outBuffer.setDiscard(true);
      return BUFFER_PROCESSED_OK;
    }

    System.arraycopy(inData, inOffset + inPdSize, container.buf, 0, inPayloadLength);
    container.len = inPayloadLength;
    data.put(inSeq, container);

    // update fields
    frameLength += inPayloadLength;
    if (firstSeq == -1 || (seqNumComparator.compare(firstSeq, inSeq) == 1)) firstSeq = inSeq;
    if (lastSeq == -1 || (seqNumComparator.compare(inSeq, lastSeq) == 1)) lastSeq = inSeq;

    if (empty) {
      // the first received packet for the current frame was just added
      empty = false;
      timestamp = inRtpTimestamp;
      pictureId = inPictureId;
    }

    if (inMarker) haveEnd = true;
    if (inIsStartOfFrame) haveStart = true;

    // check if we have a full frame
    if (frameComplete()) {
      byte[] outData = validateByteArraySize(outBuffer, frameLength, false);
      int ptr = 0;
      Container b;
      for (Map.Entry<Long, Container> entry : data.entrySet()) {
        b = entry.getValue();
        System.arraycopy(b.buf, 0, outData, ptr, b.len);
        ptr += b.len;
      }

      outBuffer.setOffset(0);
      outBuffer.setLength(frameLength);
      outBuffer.setRtpTimeStamp(inBuffer.getRtpTimeStamp());

      if (TRACE) logger.trace("Out PictureID=" + inPictureId);
      lastSentSeq = lastSeq;

      // prepare for the next frame
      reinit();

      return BUFFER_PROCESSED_OK;
    } else {
      // frame not complete yet
      outBuffer.setDiscard(true);
      return OUTPUT_BUFFER_NOT_FILLED;
    }
  }
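
frameComplete() is not included above. Presumably it checks that both the first and the last packet of the frame have arrived and that no sequence number in between is missing; a sketch under that assumption, using the fields from the snippet (the real implementation may differ):

  private boolean frameComplete() {
    if (!haveStart || !haveEnd) return false;
    // RTP sequence numbers are 16 bits wide, so account for wrap-around;
    // 'data' holds exactly one entry per received packet of the frame.
    long expected = ((lastSeq - firstSeq + (1 << 16)) % (1 << 16)) + 1;
    return data.size() == expected;
  }
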
Example #6
  @Override
  public int process(Buffer input, Buffer output) {
    if (!checkInputBuffer(input)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(input)) {
      propagateEOM(output); // TODO: what about data? can there be any?
      return BUFFER_PROCESSED_OK;
    }

    try {
      // TODO: this is very inefficient - it allocates a new byte array
      // (or more) every time
      final ByteArrayInputStream is =
          new ByteArrayInputStream((byte[]) input.getData(), input.getOffset(), input.getLength());
      final BufferedImage image = ImageIO.read(is);
      is.close();
      if (image == null) {
        // ImageIO could not decode the frame (for example, truncated data)
        output.setDiscard(true);
        output.setLength(0);
        return BUFFER_PROCESSED_FAILED;
      }
      final Buffer b =
          ImageToBuffer.createBuffer(image, ((VideoFormat) outputFormat).getFrameRate());

      output.setData(b.getData());
      output.setOffset(b.getOffset());
      output.setLength(b.getLength());
      // TODO: this is a bit hacky; this format will be more specific than the
      // actual configured output format, because now we know what ImageIO gave
      // us for a BufferedImage as far as pixel masks, etc.
      output.setFormat(b.getFormat());

      return BUFFER_PROCESSED_OK;

    } catch (IOException e) {
      output.setDiscard(true);
      output.setLength(0);
      return BUFFER_PROCESSED_FAILED;
    }
  }
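
Regarding the allocation TODO above: javax.imageio can decode into a pre-allocated BufferedImage via ImageReadParam.setDestination, which avoids creating a new image per frame when the frame size and type stay constant. A sketch of that approach, meant to live inside the existing try/catch; reusableImage and the surrounding wiring are illustrative, not part of the original decoder:

      // Decode the JPEG bytes into an existing, compatible BufferedImage.
      ImageReader reader = ImageIO.getImageReadersByFormatName("jpeg").next();
      reader.setInput(ImageIO.createImageInputStream(is));
      ImageReadParam param = reader.getDefaultReadParam();
      param.setDestination(reusableImage); // must match the stream's size and image type
      BufferedImage image = reader.read(0, param);
      reader.dispose();
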
    @Override
    public void read(Buffer buffer) throws IOException {
      pbs.read(buffer);

      // Remap the time stamps so they won't wrap around
      // while changing to a new file.
      if (buffer.getTimeStamp() != Buffer.TIME_UNKNOWN) {
        long diff = buffer.getTimeStamp() - lastTS;
        lastTS = buffer.getTimeStamp();
        if (diff > 0) timeStamp += diff;
        buffer.setTimeStamp(timeStamp);
      }

      // If this track is to be used as the master time base,
      // we'll need to compute the master time based on this track.
      if (useAsMaster) {
        if (buffer.getFormat() instanceof AudioFormat) {
          AudioFormat af = (AudioFormat) buffer.getFormat();
          masterAudioLen += buffer.getLength();
          long t = af.computeDuration(masterAudioLen);
          if (t > 0) {
            masterTime = t;
          } else {
            masterTime = buffer.getTimeStamp();
          }
        } else {
          masterTime = buffer.getTimeStamp();
        }
      }

      if (buffer.isEOM()) {
        tInfo.done = true;
        if (!ds.handleEOM(tInfo)) {
          // This is not the last processor to be done.
          // We'll need to un-set the EOM flag.
          buffer.setEOM(false);
          buffer.setDiscard(true);
        }
      }
    }
Example #8
  /** Decode the buffer. */
  public int process(Buffer inputBuffer, Buffer outputBuffer) {

    if (!checkInputBuffer(inputBuffer)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(inputBuffer)) {
      propagateEOM(outputBuffer);
      return BUFFER_PROCESSED_OK;
    }

    // Swap the data arrays between the input and output buffers so the data is
    // passed through without copying or allocating.
    Object outData = outputBuffer.getData();
    outputBuffer.setData(inputBuffer.getData());
    inputBuffer.setData(outData);
    outputBuffer.setLength(inputBuffer.getLength());
    outputBuffer.setFormat(outputFormat);
    outputBuffer.setOffset(inputBuffer.getOffset());
    return BUFFER_PROCESSED_OK;
  }
  public int process(Buffer inputBuffer, Buffer outputBuffer) {

    if (pendingFrames > 0) {
      // System.out.println("packetizing");
      return BUFFER_PROCESSED_OK;
    }

    if (!checkInputBuffer(inputBuffer)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(inputBuffer)) {
      propagateEOM(outputBuffer);
      return BUFFER_PROCESSED_OK;
    }

    int inpOffset = inputBuffer.getOffset();
    int inpLength = inputBuffer.getLength();
    int outLength = 0;
    int outOffset = 0;
    byte[] inpData = (byte[]) inputBuffer.getData();
    byte[] outData =
        validateByteArraySize(outputBuffer, calculateOutputSize(inpData.length + historySize));
    int historyLength = history.getLength();
    byte[] historyData = validateByteArraySize(history, historySize);
    int framesNumber = calculateFramesNumber(inpData.length + historySize);

    if ((regions == null) || (regions.length < framesNumber + 1))
      regions = new int[framesNumber + 1];

    if ((regionsTypes == null) || (regionsTypes.length < framesNumber))
      regionsTypes = new int[framesNumber];

    if (historyLength != 0) {
      // First top up and consume the samples left over from the previous call
      // (the history buffer) so the codec always sees whole frames.
      int bytesToCopy = (historyData.length - historyLength);
      if (bytesToCopy > inpLength) {
        bytesToCopy = inpLength;
      }

      System.arraycopy(inpData, inpOffset, historyData, historyLength, bytesToCopy);

      codecProcess(
          historyData,
          0,
          outData,
          outOffset,
          historyLength + bytesToCopy,
          readBytes,
          writeBytes,
          frameNumber,
          regions,
          regionsTypes);

      if (readBytes[0] <= 0) {
        if (writeBytes[0] <= 0) {
          // System.err.println("Returning output buffer not filled");
          return OUTPUT_BUFFER_NOT_FILLED;
        } else {
          updateOutput(outputBuffer, outputFormat, writeBytes[0], 0);
          // System.err.println("Returning OK");
          return BUFFER_PROCESSED_OK;
        }
      }

      // System.out.println("1: "+inpLength+" "+readBytes[0]+" "+writeBytes[0]);

      outOffset += writeBytes[0];
      outLength += writeBytes[0];

      inpOffset += (readBytes[0] - historyLength);
      inpLength += (historyLength - readBytes[0]);
    }

    codecProcess(
        inpData,
        inpOffset,
        outData,
        outOffset,
        inpLength,
        readBytes,
        writeBytes,
        frameNumber,
        regions,
        regionsTypes);
    // System.out.println("2: "+inpLength+" "+readBytes[0]+" "+writeBytes[0]);

    // debug
    // for (int i=0; i<frameNumber[0];i++ ) {
    // System.out.println(i+" "+regions[i]+" - "+regions[i+1]+" type "+regionsTypes[i]);
    // }

    outLength += writeBytes[0];

    inpOffset += readBytes[0];
    inpLength -= readBytes[0];

    // Keep any unconsumed input samples for the next call.
    System.arraycopy(inpData, inpOffset, historyData, 0, inpLength);
    history.setLength(inpLength);

    updateOutput(outputBuffer, outputFormat, outLength, 0);

    return BUFFER_PROCESSED_OK;
  }
  public void reset() {
    history.setLength(0);
    codecReset();
  }