Code example #1
  /**
   * Blocks and reads into a <tt>Buffer</tt> from this <tt>PullBufferStream</tt>.
   *
   * @param buffer the <tt>Buffer</tt> this <tt>PullBufferStream</tt> is to read into
   * @throws IOException if an I/O error occurs while this <tt>PullBufferStream</tt> reads into the
   *     specified <tt>Buffer</tt>
   * @see AbstractVideoPullBufferStream#doRead(Buffer)
   */
  @Override
  protected void doRead(Buffer buffer) throws IOException {
    /*
     * Determine the Format in which we're expected to output. We cannot rely
     * on the Format always being specified in the Buffer because that is not
     * the Buffer's responsibility; the DataSource of this ImageStream knows
     * the output Format.
     */
    Format format = buffer.getFormat();

    if (format == null) {
      format = getFormat();
      if (format != null) buffer.setFormat(format);
    }

    if (format instanceof AVFrameFormat) {
      Object o = buffer.getData();
      AVFrame frame;

      if (o instanceof AVFrame) frame = (AVFrame) o;
      else {
        frame = new AVFrame();
        buffer.setData(frame);
      }

      AVFrameFormat avFrameFormat = (AVFrameFormat) format;
      Dimension size = avFrameFormat.getSize();
      ByteBuffer data = readScreenNative(size);

      if (data != null) {
        if (frame.avpicture_fill(data, avFrameFormat) < 0) {
          data.free();
          throw new IOException("avpicture_fill");
        }
      } else {
        /*
         * This can happen when a monitor is disconnected from the computer
         * before or during grabbing.
         */
        throw new IOException("Failed to grab screen.");
      }
    } else {
      byte[] bytes = (byte[]) buffer.getData();
      Dimension size = ((VideoFormat) format).getSize();

      bytes = readScreen(bytes, size);

      buffer.setData(bytes);
      buffer.setOffset(0);
      buffer.setLength(bytes.length);
    }

    buffer.setHeader(null);
    buffer.setTimeStamp(System.nanoTime());
    buffer.setSequenceNumber(seqNo);
    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME | Buffer.FLAG_LIVE_DATA);
    seqNo++;
  }
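
For context, the stream above implements the JMF PullBufferStream contract, so a consumer pulls frames by repeatedly passing a reusable Buffer to read(Buffer), which ends up in doRead(Buffer). The following is a minimal, hypothetical consumer loop (the stream instance, class name, and frame count are assumptions, not taken from the project):

import java.io.IOException;

import javax.media.Buffer;
import javax.media.protocol.PullBufferStream;

/** Hypothetical consumer that pulls a few frames from a PullBufferStream. */
public class PullLoopSketch {
  public static void pullFrames(PullBufferStream stream, int frameCount) throws IOException {
    Buffer buffer = new Buffer(); // reused across reads to avoid reallocation
    for (int i = 0; i < frameCount; i++) {
      stream.read(buffer); // blocks until the stream has filled the buffer
      if (buffer.isEOM()) break; // end of media
      // The buffer now carries data, format, timestamp and sequence number.
      System.out.println(
          "frame " + buffer.getSequenceNumber()
              + " length=" + buffer.getLength()
              + " format=" + buffer.getFormat());
    }
  }
}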
Code example #2
File: DePacketizer.java  Project: elecnix/jmf
  /** Decodes the buffer. */
  public int process(Buffer inputBuffer, Buffer outputBuffer) {

    if (!checkInputBuffer(inputBuffer)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(inputBuffer)) {
      propagateEOM(outputBuffer);
      return BUFFER_PROCESSED_OK;
    }

    Object outData = outputBuffer.getData();
    outputBuffer.setData(inputBuffer.getData());
    inputBuffer.setData(outData);
    outputBuffer.setLength(inputBuffer.getLength());
    outputBuffer.setFormat(outputFormat);
    outputBuffer.setOffset(inputBuffer.getOffset());
    return BUFFER_PROCESSED_OK;
  }
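
The point of process() above is to avoid copying payload bytes: the input buffer's data array is handed to the output buffer, and the output's previous array is handed back to the input for reuse. Below is a minimal sketch of the same swap idiom, assuming javax.media.Buffer; the helper class and method names are invented for illustration:

import javax.media.Buffer;

/** Hypothetical helper showing the zero-copy data swap used by the depacketizer. */
public final class BufferSwapSketch {
  private BufferSwapSketch() {}

  /** Moves the payload from in to out without copying, recycling out's old array into in. */
  public static void swapPayload(Buffer in, Buffer out) {
    Object recycled = out.getData(); // keep the output's old array for reuse
    out.setData(in.getData());       // hand the input payload to the output buffer
    in.setData(recycled);            // give the recycled array back to the input side
    out.setOffset(in.getOffset());
    out.setLength(in.getLength());
  }
}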
Code example #3
File: ImageIODecoder.java  Project: MrSampson/fmj
  @Override
  public int process(Buffer input, Buffer output) {
    if (!checkInputBuffer(input)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(input)) {
      propagateEOM(output); // TODO: what about data? can there be any?
      return BUFFER_PROCESSED_OK;
    }

    try {
      // TODO: this is very inefficient - it allocates a new byte array
      // (or more) every time
      final ByteArrayInputStream is =
          new ByteArrayInputStream((byte[]) input.getData(), input.getOffset(), input.getLength());
      final BufferedImage image = ImageIO.read(is);
      is.close();
      final Buffer b =
          ImageToBuffer.createBuffer(image, ((VideoFormat) outputFormat).getFrameRate());

      output.setData(b.getData());
      output.setOffset(b.getOffset());
      output.setLength(b.getLength());
      // TODO: this is a bit hacky; this format will be more specific than the
      // actual set output format, because now we know what ImageIO gave us for
      // a BufferedImage as far as pixel masks, etc.
      output.setFormat(b.getFormat());

      return BUFFER_PROCESSED_OK;

    } catch (IOException e) {
      output.setDiscard(true);
      output.setLength(0);
      return BUFFER_PROCESSED_FAILED;
    }
  }
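
The decoder above expects the input Buffer to hold one complete encoded image as a byte[]. A hypothetical driver that feeds a single frame through such a Codec might look like this; it assumes the codec's formats have already been negotiated and open() has been called, and the class and method names are illustrative only:

import javax.media.Buffer;
import javax.media.Codec;

/** Hypothetical driver pushing one encoded frame through a Codec such as ImageIODecoder. */
public class DecodeOneFrameSketch {
  public static Buffer decode(Codec codec, byte[] encodedFrame) {
    Buffer input = new Buffer();
    input.setData(encodedFrame);
    input.setOffset(0);
    input.setLength(encodedFrame.length);

    Buffer output = new Buffer();
    int result = codec.process(input, output); // BUFFER_PROCESSED_OK on success
    if (result != Codec.BUFFER_PROCESSED_OK) {
      throw new IllegalStateException("decode failed, result=" + result);
    }
    return output; // carries the decoded image data and a concrete output format
  }
}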
Code example #4
    @Override
    public void readFrame(Buffer buffer) {
      // example data:
      // --ssBoundary8345
      // Content-Type: image/jpeg
      // Content-Length: 114587

      try {
        String line;
        // eat leading blank lines
        while (true) {
          line = readLine(MAX_LINE_LENGTH);
          if (line == null) {
            buffer.setEOM(true);
            buffer.setLength(0);
            return;
          }

          if (!line.trim().equals("")) break; // end of header
        }

        if (boundary == null) {
          // TODO: we should be able to get this from the content type, but the
          // content type has this stripped out. So we'll just take the first
          // nonblank line to be the boundary.
          boundary = line.trim();
          // System.out.println("boundary: " + boundary);
        } else {
          if (!line.trim().equals(boundary)) {
            // throw new IOException("Expected boundary: " +
            // toPrintable(line));
            // TODO: why do we seem to get these when playing back
            // mmr files recorded using FmjTranscode?
            logger.warning("Expected boundary (frame " + framesRead + "): " + toPrintable(line));

            // handle streams that are truncated in the middle of a frame:
            final int eatResult = eatUntil(boundary); // TODO: no need to store the data

            logger.info(
                "Ignored bytes (eom after="
                    + (eatResult < 0)
                    + "): "
                    + (eatResult < 0 ? (-1 * eatResult - 1) : eatResult));
            if (eatResult < 0) {
              buffer.setEOM(true);
              buffer.setLength(0);
              return;
            }

            // now read boundary
            line = readLine(MAX_LINE_LENGTH);
            if (!line.trim().equals(boundary)) {
              // should never happen
              throw new RuntimeException("No boundary found after eatUntil(boundary)");
            }
          }
        }

        final Properties properties = new Properties();

        while (true) {
          line = readLine(MAX_LINE_LENGTH);
          if (line == null) {
            buffer.setEOM(true);
            buffer.setLength(0);
            return;
          }

          if (line.trim().equals("")) break; // end of header

          if (!parseProperty(line, properties))
            throw new IOException("Expected property: " + toPrintable(line));
        }

        final String contentType = properties.getProperty("Content-Type".toUpperCase());
        if (contentType == null) {
          logger.warning("Header properties: " + properties);
          throw new IOException("Expected Content-Type in header");
        }

        // check supported content types:
        if (!isSupportedFrameContentType(contentType)) {
          throw new IOException("Unsupported Content-Type: " + contentType);
        }

        if (frameContentType == null) {
          frameContentType = contentType;
        } else {
          if (!contentType.equals(frameContentType))
            throw new IOException(
                "Content type changed during stream from "
                    + frameContentType
                    + " to "
                    + contentType);
        }

        // TODO: check that size doesn't change throughout

        final byte[] data;

        final String contentLenStr = properties.getProperty("Content-Length".toUpperCase());
        if (contentLenStr != null) { // if we know the content length, use it
          final int contentLen;
          try {
            contentLen = Integer.parseInt(contentLenStr);
          } catch (NumberFormatException e) {
            throw new IOException("Invalid content length: " + contentLenStr);
          }

          // now, read the content-length bytes
          data = readFully(contentLen); // TODO: don't realloc each time
        } else {
          // if we don't know the content length, just read until we
          // find the boundary.
          // Some IP cameras don't specify it, like
          // http://webcam-1.duesseldorf.it-on.net/cgi-bin/nph-update.cgi
          data = readUntil(boundary);
        }

        // ext: optional, non-standard timestamp property
        final String timestampStr = properties.getProperty(TIMESTAMP_KEY.toUpperCase());
        if (timestampStr != null) {
          try {
            final long timestamp = Long.parseLong(timestampStr);
            buffer.setTimeStamp(timestamp);

          } catch (NumberFormatException e) {
            logger.log(Level.WARNING, "" + e, e);
          }
        }

        if (data == null) {
          buffer.setEOM(true);
          buffer.setLength(0);
          return;
        }

        buffer.setData(data);
        buffer.setOffset(0);
        buffer.setLength(data.length);
        ++framesRead;

      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
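
readFrame() relies on a parseProperty(line, properties) helper that is not shown. Judging from the upper-cased lookups such as "Content-Type".toUpperCase(), it presumably splits a "Name: value" header line at the first colon and stores the value under the upper-cased name. A hypothetical stand-in under that assumption:

import java.util.Properties;

/** Hypothetical stand-in for the parseProperty helper used by readFrame above. */
public final class HeaderParseSketch {
  private HeaderParseSketch() {}

  /**
   * Parses a "Name: value" header line into the given Properties, keyed by the
   * upper-cased name. Returns false if the line does not look like a header.
   */
  public static boolean parseProperty(String line, Properties properties) {
    int colon = line.indexOf(':');
    if (colon <= 0) return false; // no name before the colon, not a header line
    String name = line.substring(0, colon).trim().toUpperCase();
    String value = line.substring(colon + 1).trim();
    properties.setProperty(name, value);
    return true;
  }
}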