public void captureImage() {
  // Build a timestamped destination path. java.io.File.separator keeps the
  // path portable; the original hard-coded the Windows "\\" separator.
  String savepath =
      this.saveDirectory
          + java.io.File.separator
          + "cam"
          + this.getDateFormatNow("yyyyMMdd_HHmmss-S")
          + ".jpg";
  System.out.println("Capturing current image to " + savepath);

  // Grab the current frame from the running player.
  FrameGrabbingControl fgc =
      (FrameGrabbingControl) player.getControl("javax.media.control.FrameGrabbingControl");
  buf = fgc.grabFrame();

  // Convert the raw media buffer to an AWT image.
  btoi = new BufferToImage((VideoFormat) buf.getFormat());
  img = btoi.createImage(buf);

  // Persist the frame as a JPEG file.
  saveJPG(img, savepath);

  // show the image
  // imgpanel.setImage(img);

  // images.add(img);
  images.add(savepath);

  // Keep the "last added" list bounded; evict the oldest entry first.
  if (images_lastadded.size() >= lastadded_max) {
    images_lastadded.remove(images_lastadded.size() - 1);
  }

  // Record the new image's index at the head of both tracking lists.
  images_lastadded.add(0, images.size() - 1);
  images_nevershown.add(0, images.size() - 1);

  forceNewImage();
}
// ----- Example #2 -----
  /**
   * (Re)creates the animated {@link MemoryImageSource} backing the rendered
   * image from the given buffer's pixel data and RGB format.
   *
   * @param buffer the video buffer; ignored unless its data is an {@code int[]}
   */
  private void newImage(Buffer buffer) {
    Object data = buffer.getData();
    if (!(data instanceof int[])) return;
    RGBFormat format = (RGBFormat) buffer.getFormat();

    // Color model derived from the stream's per-channel bit masks.
    DirectColorModel dcm =
        new DirectColorModel(
            format.getBitsPerPixel(),
            format.getRedMask(),
            format.getGreenMask(),
            format.getBlueMask());

    // BUG FIX: MemoryImageSource(w, h, ...) expects the visible width as its
    // first argument; the original passed the line stride, which renders row
    // padding as pixels whenever stride > width. The stride belongs only in
    // the trailing "scansize" argument.
    sourceImage =
        new MemoryImageSource(
            format.getSize().width,
            format.getSize().height,
            dcm,
            (int[]) data,
            0,
            format.getLineStride());
    sourceImage.setAnimated(true);
    sourceImage.setFullBufferUpdates(true);
    if (component != null) {
      destImage = component.createImage(sourceImage);
      component.prepareImage(destImage, component);
    }
  }
// ----- Example #3 -----
  /**
   * Blocks and reads into a <tt>Buffer</tt> from this <tt>PullBufferStream</tt>.
   *
   * @param buffer the <tt>Buffer</tt> this <tt>PullBufferStream</tt> is to read into
   * @throws IOException if an I/O error occurs while this <tt>PullBufferStream</tt> reads into the
   *     specified <tt>Buffer</tt>
   * @see AbstractVideoPullBufferStream#doRead(Buffer)
   */
  @Override
  protected void doRead(Buffer buffer) throws IOException {
    // The Buffer does not always carry a Format (that is not its
    // responsibility) - the DataSource of this ImageStream knows the output
    // Format, so fall back to it when the Buffer has none.
    Format format = buffer.getFormat();
    if (format == null) {
      format = getFormat();
      if (format != null) {
        buffer.setFormat(format);
      }
    }

    if (format instanceof AVFrameFormat) {
      // Native path: reuse the Buffer's existing AVFrame when present,
      // otherwise attach a fresh one.
      Object dataObj = buffer.getData();
      AVFrame frame = (dataObj instanceof AVFrame) ? (AVFrame) dataObj : null;
      if (frame == null) {
        frame = new AVFrame();
        buffer.setData(frame);
      }

      AVFrameFormat avFrameFormat = (AVFrameFormat) format;
      ByteBuffer data = readScreenNative(avFrameFormat.getSize());
      if (data == null) {
        // This can happen when we disconnect a monitor from computer
        // before or during grabbing.
        throw new IOException("Failed to grab screen.");
      }
      if (frame.avpicture_fill(data, avFrameFormat) < 0) {
        data.free();
        throw new IOException("avpicture_fill");
      }
    } else {
      // Plain byte[] path: readScreen may reuse or reallocate the array.
      byte[] bytes = readScreen((byte[]) buffer.getData(), ((VideoFormat) format).getSize());
      buffer.setData(bytes);
      buffer.setOffset(0);
      buffer.setLength(bytes.length);
    }

    // Stamp the buffer as live data on the system clock.
    buffer.setHeader(null);
    buffer.setTimeStamp(System.nanoTime());
    buffer.setSequenceNumber(seqNo);
    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME | Buffer.FLAG_LIVE_DATA);
    seqNo++;
  }
// ----- Example #4 -----
  /**
   * Processes the data and renders it to a component.
   *
   * @param buffer the video buffer to render; its data must be an {@code int[]}
   * @return {@code BUFFER_PROCESSED_OK} on success, {@code BUFFER_PROCESSED_FAILED} otherwise
   */
  public synchronized int process(Buffer buffer) {
    if (component == null) return BUFFER_PROCESSED_FAILED;

    Format inf = buffer.getFormat();
    if (inf == null) return BUFFER_PROCESSED_FAILED;

    // Re-negotiate the input format when it changed.
    // NOTE(review): failing when setInputFormat returns non-null is the
    // opposite of the common JMF convention (null == unsupported) — confirm
    // against this class's setInputFormat implementation.
    if (inf != inputFormat || !buffer.getFormat().equals(inputFormat)) {
      if (setInputFormat(inf) != null) return BUFFER_PROCESSED_FAILED;
    }

    Object data = buffer.getData();
    if (!(data instanceof int[])) return BUFFER_PROCESSED_FAILED;

    // Rebuild the image source only when a new Buffer instance arrives.
    if (lastBuffer != buffer) {
      lastBuffer = buffer;
      newImage(buffer);
    }

    sourceImage.newPixels(0, 0, inWidth, inHeight);

    Graphics g = component.getGraphics();
    if (g != null) {
      try {
        if (reqBounds == null) {
          bounds = component.getBounds();
          bounds.x = 0;
          bounds.y = 0;
        } else {
          bounds = reqBounds;
        }
        g.drawImage(
            destImage,
            bounds.x,
            bounds.y,
            bounds.width,
            bounds.height,
            0,
            0,
            inWidth,
            inHeight,
            component);
      } finally {
        // BUG FIX: a Graphics obtained via Component.getGraphics() must be
        // released explicitly; the original leaked one per processed frame.
        g.dispose();
      }
    }

    return BUFFER_PROCESSED_OK;
  }
    /**
     * Reads from the wrapped stream and rewrites the buffer's time stamp so it
     * increases monotonically across file changes; optionally derives the
     * master time base from this track.
     *
     * @param buffer the buffer to read into
     * @throws IOException if the underlying stream fails to read
     */
    @Override
    public void read(Buffer buffer) throws IOException {
      pbs.read(buffer);

      // Remap the time stamps so they won't wrap around while changing to a
      // new file: only forward deltas advance the rewritten clock.
      long ts = buffer.getTimeStamp();
      if (ts != Buffer.TIME_UNKNOWN) {
        long delta = ts - lastTS;
        lastTS = ts;
        if (delta > 0) {
          timeStamp += delta;
        }
        buffer.setTimeStamp(timeStamp);
      }

      // When this track drives the master time base, prefer the accumulated
      // audio duration; fall back to the (remapped) buffer time stamp.
      if (useAsMaster) {
        Format fmt = buffer.getFormat();
        if (fmt instanceof AudioFormat) {
          masterAudioLen += buffer.getLength();
          long duration = ((AudioFormat) fmt).computeDuration(masterAudioLen);
          masterTime = (duration > 0) ? duration : buffer.getTimeStamp();
        } else {
          masterTime = buffer.getTimeStamp();
        }
      }

      if (buffer.isEOM()) {
        tInfo.done = true;
        if (!ds.handleEOM(tInfo)) {
          // Not the last processor to finish: un-set the EOM flag so
          // downstream keeps running, and discard this buffer.
          buffer.setEOM(false);
          buffer.setDiscard(true);
        }
      }
    }
// ----- Example #6 -----
  /**
   * Decodes one encoded image buffer (e.g. JPEG) into a raw video buffer via
   * {@link ImageIO}.
   *
   * @param input the encoded input buffer
   * @param output the buffer to receive the decoded frame
   * @return {@code BUFFER_PROCESSED_OK} on success, {@code BUFFER_PROCESSED_FAILED} otherwise
   */
  @Override
  public int process(Buffer input, Buffer output) {
    if (!checkInputBuffer(input)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(input)) {
      propagateEOM(output); // TODO: what about data? can there be any?
      return BUFFER_PROCESSED_OK;
    }

    try {
      // TODO: this is very inefficient - it allocates a new byte array
      // (or more) every time
      final ByteArrayInputStream is =
          new ByteArrayInputStream((byte[]) input.getData(), input.getOffset(), input.getLength());
      final BufferedImage image = ImageIO.read(is);
      is.close();

      // BUG FIX: ImageIO.read returns null (rather than throwing) when no
      // registered reader can decode the data; the original then hit an NPE
      // inside ImageToBuffer.createBuffer. Fail the buffer gracefully instead.
      if (image == null) {
        output.setDiscard(true);
        output.setLength(0);
        return BUFFER_PROCESSED_FAILED;
      }

      final Buffer b =
          ImageToBuffer.createBuffer(image, ((VideoFormat) outputFormat).getFrameRate());

      output.setData(b.getData());
      output.setOffset(b.getOffset());
      output.setLength(b.getLength());
      output.setFormat(b.getFormat()); // TODO: this is a bit hacky, this
      // format will be more specific
      // than the actual set output
      // format, because now we know what
      // ImageIO gave us for a
      // BufferedImage as far as pixel
      // masks, etc.

      return BUFFER_PROCESSED_OK;

    } catch (IOException e) {
      output.setDiscard(true);
      output.setLength(0);
      return BUFFER_PROCESSED_FAILED;
    }
  }
// ----- Example #7 -----
  /**
   * Applies the currently selected JAI 3x3 convolution effect to the incoming
   * video frame and writes the result to {@code outBuffer}.
   *
   * @param inBuffer the input video frame
   * @param outBuffer the buffer to receive the filtered frame
   * @return {@code BUFFER_PROCESSED_OK} on success, {@code BUFFER_PROCESSED_FAILED} otherwise
   */
  public int process(Buffer inBuffer, Buffer outBuffer) {
    try {
      if (frameConverter == null) {
        frameConverter = new BufferToImage((VideoFormat) inBuffer.getFormat());
      }

      // Convert the Buffer to an AWT Image.
      Image frameImage = frameConverter.createImage(inBuffer);

      // Derive a JAI image from the AWT image.
      PlanarImage jaiImage = JAI.create("AWTImage", frameImage);

      int index;
      boolean emboss = false;
      if (control != null) {
        index = control.getEffectIndex();
        if (control.getEffectName().equals("None")) {
          // Pass-through: hand the input frame on unchanged.
          // NOTE(review): time stamp / sequence number are not copied here —
          // confirm whether downstream consumers depend on them.
          outBuffer.setData(inBuffer.getData());
          outBuffer.setFormat(inBuffer.getFormat());
          outBuffer.setFlags(inBuffer.getFlags());
          outBuffer.setLength(inBuffer.getLength());
          return BUFFER_PROCESSED_OK;
        }
        if (control.getEffectName().equals("Emboss")) {
          emboss = true; // Special case
        }
      } else index = 0;

      // Lazily build and cache the 3x3 convolution kernel for this effect.
      if (kernels[index] == null) {
        kernels[index] = new KernelJAI(3, 3, matrices[index]);
      }

      jaiImage = JAI.create("convolve", jaiImage, kernels[index]);

      if (emboss) { // add 128 to make it brighter
        double[] constants = new double[] {128., 128., 128.};
        ParameterBlock pb = new ParameterBlock();
        pb.addSource(jaiImage);
        pb.add(constants);
        jaiImage = JAI.create("addconst", pb, null);
      }

      // Now convert the image back to a buffer. Use the input stream's frame
      // rate when it is specified instead of assuming a fixed rate; fall back
      // to the original hard-coded 15 fps when it is unspecified (<= 0).
      BufferedImage bim = jaiImage.getAsBufferedImage();
      float frameRate = ((VideoFormat) inBuffer.getFormat()).getFrameRate();
      if (frameRate <= 0) {
        frameRate = 15.F;
      }

      Buffer out = ImageToBuffer.createBuffer(bim, frameRate);
      if (out == null) {
        if (debug) {
          System.out.println("ImageToBuffer returned null");
        }
        return BUFFER_PROCESSED_FAILED;
      }

      outBuffer.setData(out.getData());
      outBuffer.setFormat(out.getFormat());
      outBuffer.setFlags(out.getFlags());
      outBuffer.setLength(out.getLength());
    } catch (Exception e) {
      System.err.println(e);
      return BUFFER_PROCESSED_FAILED;
    } catch (Error e) {
      // JAI native operations can surface Errors; treat them as a failed
      // frame rather than letting them kill the processing thread.
      System.err.println(e);
      return BUFFER_PROCESSED_FAILED;
    }
    return BUFFER_PROCESSED_OK;
  }