  public void read(Buffer buf) {
    synchronized (buffers) {
      while (buffers.size() == 0 && notDone) {
        try {
          buffers.wait();
        } catch (InterruptedException ex) {
          // Restore the interrupt status instead of swallowing it, then stop waiting.
          Thread.currentThread().interrupt();
          break;
        }
      }

      if (buffers.size() > 0) {
        Buffer newBuf = (Buffer) buffers.get(0);
        int[] newData = (int[]) newBuf.getData();
        buf.setData(newData);
        buf.setLength(newBuf.getLength());
        buf.setOffset(0);
        buf.setFormat(vFormat);
        buf.setFlags(Buffer.FLAG_KEY_FRAME | Buffer.FLAG_NO_DROP);
        buffers.remove(0);
        // System.out.println("Removing buffer: size = " + buffers.size());
      } else {
        buf.setEOM(true);
        buf.setOffset(0);
        buf.setLength(0);
        // The lock on buffers is already held here; no nested synchronized is needed.
        buffers.notify();
      }
    }
  }
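
For context, read() above is the consumer half of a wait/notify hand-off on the shared buffers list. A minimal sketch of the matching producer side, assuming a write method on the same class using the same buffers field (these names are assumptions, not taken from the original source), could look like this:

  // Hypothetical producer counterpart (name assumed): appends a filled Buffer to
  // the shared list and wakes a reader blocked in buffers.wait().
  public void write(Buffer newBuf) {
    synchronized (buffers) {
      buffers.add(newBuf);
      buffers.notifyAll();
    }
  }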
Example #2
  /**
   * Blocks and reads into a <tt>Buffer</tt> from this <tt>PullBufferStream</tt>.
   *
   * @param buffer the <tt>Buffer</tt> this <tt>PullBufferStream</tt> is to read into
   * @throws IOException if an I/O error occurs while this <tt>PullBufferStream</tt> reads into the
   *     specified <tt>Buffer</tt>
   * @see AbstractVideoPullBufferStream#doRead(Buffer)
   */
  @Override
  protected void doRead(Buffer buffer) throws IOException {
    /*
     * Determine the Format in which we're expected to output. We cannot rely on
     * the Format always being specified in the Buffer, because that is not the
     * Buffer's responsibility; the DataSource of this ImageStream knows the
     * output Format.
     */
    Format format = buffer.getFormat();

    if (format == null) {
      format = getFormat();
      if (format != null) buffer.setFormat(format);
    }

    if (format instanceof AVFrameFormat) {
      Object o = buffer.getData();
      AVFrame frame;

      if (o instanceof AVFrame) frame = (AVFrame) o;
      else {
        frame = new AVFrame();
        buffer.setData(frame);
      }

      AVFrameFormat avFrameFormat = (AVFrameFormat) format;
      Dimension size = avFrameFormat.getSize();
      ByteBuffer data = readScreenNative(size);

      if (data != null) {
        if (frame.avpicture_fill(data, avFrameFormat) < 0) {
          data.free();
          throw new IOException("avpicture_fill");
        }
      } else {
        /*
         * This can happen when a monitor is disconnected from the computer
         * before or during grabbing.
         */
        throw new IOException("Failed to grab screen.");
      }
    } else {
      byte[] bytes = (byte[]) buffer.getData();
      Dimension size = ((VideoFormat) format).getSize();

      bytes = readScreen(bytes, size);

      buffer.setData(bytes);
      buffer.setOffset(0);
      buffer.setLength(bytes.length);
    }

    buffer.setHeader(null);
    buffer.setTimeStamp(System.nanoTime());
    buffer.setSequenceNumber(seqNo);
    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME | Buffer.FLAG_LIVE_DATA);
    seqNo++;
  }
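
As a usage sketch for the stream above, a consumer would call the inherited blocking read(Buffer) in a loop; the stream and stopped names below are assumptions, not part of the original class:

  // Hypothetical consumer loop ("stream" and "stopped" are assumed names); the
  // inherited read(Buffer) blocks until doRead() has filled the buffer.
  void pumpFrames() throws IOException {
    Buffer frameBuffer = new Buffer();
    while (!stopped) {
      stream.read(frameBuffer);
      // frameBuffer.getData() now holds an AVFrame or a byte[], depending on the
      // negotiated Format; hand it to a renderer or encoder here.
    }
  }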
Example #3
  /** Decodes the buffer. */
  public int process(Buffer inputBuffer, Buffer outputBuffer) {

    if (!checkInputBuffer(inputBuffer)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(inputBuffer)) {
      propagateEOM(outputBuffer);
      return BUFFER_PROCESSED_OK;
    }

    // Swap the data arrays between the input and output buffers to avoid a copy;
    // the caller's next input then reuses the array previously used for output.
    Object outData = outputBuffer.getData();
    outputBuffer.setData(inputBuffer.getData());
    inputBuffer.setData(outData);
    outputBuffer.setLength(inputBuffer.getLength());
    outputBuffer.setFormat(outputFormat);
    outputBuffer.setOffset(inputBuffer.getOffset());
    return BUFFER_PROCESSED_OK;
  }
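
Note that process() above avoids a copy by swapping the data arrays of the two buffers, so the caller's next input buffer reuses the array that was previously handed out as output. A driver loop consistent with that contract might look like the following sketch, where source, codec, and sink are assumed names rather than anything from the original code:

  // Hypothetical driver loop ("source", "codec", and "sink" are assumed names).
  void pump() throws IOException {
    Buffer in = new Buffer();
    Buffer out = new Buffer();
    do {
      source.read(in);                         // fill the input buffer
      int result = codec.process(in, out);     // swaps the data arrays, no copy
      if (result == PlugIn.BUFFER_PROCESSED_OK && !out.isDiscard()) {
        sink.write(out);                       // hand off the processed data
      }
    } while (!in.isEOM());
  }

Example #4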
    /**
     * Reads media data from this <tt>PullBufferStream</tt> into a specific <tt>Buffer</tt> with
     * blocking.
     *
     * @param buffer the <tt>Buffer</tt> in which media data is to be read from this
     *     <tt>PullBufferStream</tt>
     * @throws IOException if anything goes wrong while reading media data from this
     *     <tt>PullBufferStream</tt> into the specified <tt>buffer</tt>
     * @see javax.media.protocol.PullBufferStream#read(javax.media.Buffer)
     */
    public void read(Buffer buffer) throws IOException {
      if (setThreadPriority) {
        setThreadPriority = false;
        setThreadPriority();
      }

      Object data = buffer.getData();
      int length = this.length;

      // Reuse the caller's byte[] only if it is large enough; otherwise reallocate.
      if (data instanceof byte[]) {
        if (((byte[]) data).length < length) data = null;
      } else data = null;
      if (data == null) {
        data = new byte[length];
        buffer.setData(data);
      }

      int toRead = length;
      byte[] bytes = (byte[]) data;
      int offset = 0;

      buffer.setLength(0);
      while (toRead > 0) {
        int read;

        synchronized (this) {
          if (audioRecord.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING)
            read = audioRecord.read(bytes, offset, toRead);
          else break;
        }

        if (read < 0) {
          throw new IOException(
              AudioRecord.class.getName() + "#read(byte[], int, int) returned " + read);
        } else {
          buffer.setLength(buffer.getLength() + read);
          offset += read;
          toRead -= read;
        }
      }
      buffer.setOffset(0);

      // Apply software gain.
      if (gainControl != null) {
        BasicVolumeControl.applyGain(gainControl, bytes, buffer.getOffset(), buffer.getLength());
      }
    }
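
BasicVolumeControl.applyGain is referenced but not shown above. Purely as an illustration of what software gain on 16-bit little-endian PCM involves, a sketch (not the actual implementation) scales and clamps each sample:

    // Illustrative only, not the real BasicVolumeControl.applyGain: scale each
    // 16-bit little-endian sample by "gain" and clamp it to the signed range.
    static void applyGainSketch(byte[] pcm, int offset, int length, float gain) {
      for (int i = offset; i + 1 < offset + length; i += 2) {
        int sample = (pcm[i] & 0xFF) | (pcm[i + 1] << 8); // assemble the sample
        sample = Math.max(-32768, Math.min(32767, Math.round(sample * gain)));
        pcm[i] = (byte) sample;                           // low byte
        pcm[i + 1] = (byte) (sample >> 8);                // high byte
      }
    }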
Example #5
  @Override
  public int process(Buffer input, Buffer output) {
    if (!checkInputBuffer(input)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(input)) {
      propagateEOM(output); // TODO: what about data? can there be any?
      return BUFFER_PROCESSED_OK;
    }

    try {
      // TODO: this is very inefficient - it allocates a new byte array
      // (or more) every time
      final ByteArrayInputStream is =
          new ByteArrayInputStream((byte[]) input.getData(), input.getOffset(), input.getLength());
      final BufferedImage image = ImageIO.read(is);
      is.close();
      final Buffer b =
          ImageToBuffer.createBuffer(image, ((VideoFormat) outputFormat).getFrameRate());

      output.setData(b.getData());
      output.setOffset(b.getOffset());
      output.setLength(b.getLength());
      // TODO: this is a bit hacky; this format will be more specific than the
      // actual set output format, because now we know what ImageIO gave us for
      // a BufferedImage as far as pixel masks, etc.
      output.setFormat(b.getFormat());

      return BUFFER_PROCESSED_OK;

    } catch (IOException e) {
      output.setDiscard(true);
      output.setLength(0);
      return BUFFER_PROCESSED_FAILED;
    }
  }
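
One way to address the per-frame allocation noted in the TODO above is to cache a single JPEG ImageReader instead of calling ImageIO.read() each time. The sketch below illustrates that idea; the caching approach and class name are assumptions, not part of the original codec:

// Sketch of a cached-reader JPEG decode; caching an ImageReader is one way to
// avoid reallocating per frame, not what the codec above actually does.
import java.awt.image.BufferedImage;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;

class CachedJpegDecoder {
  private final ImageReader reader = ImageIO.getImageReadersByFormatName("jpeg").next();

  BufferedImage decode(byte[] data, int offset, int length) throws IOException {
    try (ImageInputStream iis =
        ImageIO.createImageInputStream(new ByteArrayInputStream(data, offset, length))) {
      reader.setInput(iis, true); // true = seek forward only
      return reader.read(0);      // decode the single image in the stream
    }
  }
}

Example #6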
    @Override
    public void readFrame(Buffer buffer) {
      // example data:
      // --ssBoundary8345
      // Content-Type: image/jpeg
      // Content-Length: 114587

      try {
        String line;
        // eat leading blank lines
        while (true) {
          line = readLine(MAX_LINE_LENGTH);
          if (line == null) {
            buffer.setEOM(true);
            buffer.setLength(0);
            return;
          }

          if (!line.trim().equals("")) break; // end of header
        }

        if (boundary == null) {
          // TODO: we should be able to get this from the content type, but the
          // content type has this stripped out. So we'll just take the first
          // nonblank line to be the boundary.
          boundary = line.trim();
          // System.out.println("boundary: " + boundary);
        } else {
          if (!line.trim().equals(boundary)) {
            // throw new IOException("Expected boundary: " +
            // toPrintable(line));
            // TODO: why do we seem to get these when playing back
            // mmr files recorded using FmjTranscode?
            logger.warning("Expected boundary (frame " + framesRead + "): " + toPrintable(line));

            // handle streams that are truncated in the middle of a frame
            // TODO: no need to store the data
            final int eatResult = eatUntil(boundary);

            logger.info(
                "Ignored bytes (eom after="
                    + (eatResult < 0)
                    + "): "
                    + (eatResult < 0 ? (-1 * eatResult - 1) : eatResult));
            if (eatResult < 0) {
              buffer.setEOM(true);
              buffer.setLength(0);
              return;
            }

            // now read the boundary line; a null here means the stream ended unexpectedly
            line = readLine(MAX_LINE_LENGTH);
            if (line == null || !line.trim().equals(boundary)) {
              // should never happen
              throw new RuntimeException("No boundary found after eatUntil(boundary)");
            }
          }
        }

        final Properties properties = new Properties();

        while (true) {
          line = readLine(MAX_LINE_LENGTH);
          if (line == null) {
            buffer.setEOM(true);
            buffer.setLength(0);
            return;
          }

          if (line.trim().equals("")) break; // end of header

          if (!parseProperty(line, properties))
            throw new IOException("Expected property: " + toPrintable(line));
        }

        final String contentType = properties.getProperty("Content-Type".toUpperCase());
        if (contentType == null) {
          logger.warning("Header properties: " + properties);
          throw new IOException("Expected Content-Type in header");
        }

        // check supported content types:
        if (!isSupportedFrameContentType(contentType)) {
          throw new IOException("Unsupported Content-Type: " + contentType);
        }

        if (frameContentType == null) {
          frameContentType = contentType;
        } else {
          if (!contentType.equals(frameContentType))
            throw new IOException(
                "Content type changed during stream from "
                    + frameContentType
                    + " to "
                    + contentType);
        }

        // TODO: check that size doesn't change throughout

        final byte[] data;

        final String contentLenStr = properties.getProperty("Content-Length".toUpperCase());
        if (contentLenStr != null) { // if we know the content length, use it
          final int contentLen;
          try {
            contentLen = Integer.parseInt(contentLenStr);
          } catch (NumberFormatException e) {
            throw new IOException("Invalid content length: " + contentLenStr);
          }

          // now, read the content-length bytes
          // TODO: don't realloc each time
          data = readFully(contentLen);
        } else {
          // if we don't know the content length, just read until we
          // find the boundary.
          // Some IP cameras don't specify it, like
          // http://webcam-1.duesseldorf.it-on.net/cgi-bin/nph-update.cgi
          data = readUntil(boundary);
        }

        // ext: optional (non-standard) timestamp header
        final String timestampStr = properties.getProperty(TIMESTAMP_KEY.toUpperCase());
        if (timestampStr != null) {
          try {
            final long timestamp = Long.parseLong(timestampStr);
            buffer.setTimeStamp(timestamp);

          } catch (NumberFormatException e) {
            logger.log(Level.WARNING, "" + e, e);
          }
        }

        if (data == null) {
          buffer.setEOM(true);
          buffer.setLength(0);
          return;
        }

        buffer.setData(data);
        buffer.setOffset(0);
        buffer.setLength(data.length);
        ++framesRead;

      } catch (IOException e) {
        throw new RuntimeException(e);
      }
    }
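
The parseProperty helper used above is not included in the snippet. A minimal sketch consistent with the upper-cased lookups ("Content-Type".toUpperCase() and so on) would split each header line at the first colon; the exact original implementation is not known, so treat this as an assumption:

    // Assumed shape of parseProperty (not the original implementation): store each
    // "Key: value" header line under an upper-cased key, as the lookups above expect.
    private boolean parseProperty(String line, java.util.Properties properties) {
      final int colon = line.indexOf(':');
      if (colon < 0) return false; // not a "Key: value" header line
      final String key = line.substring(0, colon).trim().toUpperCase();
      final String value = line.substring(colon + 1).trim();
      properties.setProperty(key, value);
      return true;
    }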
Example #7
  public int process(Buffer inBuffer, Buffer outBuffer) {
    try {
      if (frameConverter == null) {
        frameConverter = new BufferToImage((VideoFormat) inBuffer.getFormat());
      }

      // Convert the Buffer to an AWT Image.
      Image frameImage = frameConverter.createImage(inBuffer);

      // Derive a JAI image from the AWT image.
      PlanarImage jaiImage = JAI.create("AWTImage", frameImage);

      int index;
      boolean emboss = false;
      if (control != null) {
        index = control.getEffectIndex();
        if (control.getEffectName().equals("None")) {
          outBuffer.setData(inBuffer.getData());
          outBuffer.setFormat(inBuffer.getFormat());
          outBuffer.setFlags(inBuffer.getFlags());
          outBuffer.setLength(inBuffer.getLength());
          return BUFFER_PROCESSED_OK;
        }
        if (control.getEffectName().equals("Emboss")) {
          emboss = true; // Special case
        }
      } else index = 0;

      // Lazily create and cache the 3x3 convolution kernel for the selected effect.
      if (kernels[index] == null) {
        kernels[index] = new KernelJAI(3, 3, matrices[index]);
      }

      jaiImage = JAI.create("convolve", jaiImage, kernels[index]);

      if (emboss) { // add 128 to make it brighter
        double[] constants = new double[] {128., 128., 128.};
        ParameterBlock pb = new ParameterBlock();
        pb.addSource(jaiImage);
        pb.add(constants);
        jaiImage = JAI.create("addconst", pb, null);
      }

      // Now convert the image to a buffer
      BufferedImage bim = jaiImage.getAsBufferedImage();

      Buffer out = ImageToBuffer.createBuffer(bim, 15.F);
      if (out == null) {
        if (debug) {
          System.out.println("ImageToBuffer returned null");
        }
        return BUFFER_PROCESSED_FAILED;
      }

      outBuffer.setData(out.getData());
      outBuffer.setFormat(out.getFormat());
      outBuffer.setFlags(out.getFlags());
      outBuffer.setLength(out.getLength());
    } catch (Exception e) {
      System.err.println(e);
      return BUFFER_PROCESSED_FAILED;
    } catch (Error e) {
      System.err.println(e);
      return BUFFER_PROCESSED_FAILED;
    }
    return BUFFER_PROCESSED_OK;
  }
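
The matrices array indexed above is not part of the snippet. Typical 3x3 convolution kernels for an effect list like this are stored row-major as float[9]; the values below are illustrative assumptions, not the original data:

  // Illustrative kernel table (values are assumptions, not the original matrices);
  // each row is a 3x3 kernel passed to new KernelJAI(3, 3, matrices[index]).
  float[][] matrices = {
    {0f, 0f, 0f, 0f, 1f, 0f, 0f, 0f, 0f},         // identity
    {1 / 9f, 1 / 9f, 1 / 9f, 1 / 9f, 1 / 9f, 1 / 9f, 1 / 9f, 1 / 9f, 1 / 9f}, // blur
    {0f, -1f, 0f, -1f, 5f, -1f, 0f, -1f, 0f},     // sharpen
    {-2f, -1f, 0f, -1f, 1f, 1f, 0f, 1f, 2f}       // emboss (then +128 per band)
  };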