コード例 #1
0
ファイル: Capture_video.java プロジェクト: cowboysaif/java
  /**
   * Waits briefly, grabs the current video frame from the player, stops playback, and writes the
   * frame to {@code C:/ii.jpg} as a JPEG. Errors are logged rather than propagated.
   */
  public static void stop() {
    try {
      // Give the capture pipeline time to deliver a stable frame before grabbing.
      Thread.sleep(5000);

      // Grab the current frame from the running player.
      FrameGrabbingControl fgc =
          (FrameGrabbingControl) player.getControl("javax.media.control.FrameGrabbingControl");
      Buffer buf = fgc.grabFrame();

      // Convert the media buffer to an AWT Image, then stop playback.
      BufferToImage btoi = new BufferToImage((VideoFormat) buf.getFormat());
      player.stop();
      Image img = btoi.createImage(buf);

      int w = img.getWidth(null);
      int h = img.getHeight(null);

      // Render the frame into a BufferedImage so ImageIO can encode it.
      BufferedImage bi = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
      Graphics2D g2 = bi.createGraphics();
      try {
        g2.drawImage(img, 0, 0, null);
      } finally {
        g2.dispose(); // always release the native graphics context
      }

      try {
        ImageIO.write(bi, "jpg", new File("C:/ii.jpg"));
      } catch (IOException ex) {
        Logger.getLogger(Capture_video.class.getName()).log(Level.SEVERE, null, ex);
      }
    } catch (InterruptedException ex) {
      // Restore the interrupt status so callers can still observe the interruption.
      Thread.currentThread().interrupt();
      Logger.getLogger(Capture_video.class.getName()).log(Level.SEVERE, null, ex);
    }
  }
コード例 #2
0
ファイル: PhotoState.java プロジェクト: JostCrow/NRG
  /**
   * Per-frame game-state update: eases the gear/spinner rotation toward its target and, when
   * requested, grabs a fresh webcam frame and uploads it as the webcam-feed texture.
   *
   * @param gameContainer the Slick game container
   * @param stateBasedGame the owning state machine
   * @param delta milliseconds since the previous update
   * @throws SlickException if the superclass update fails
   */
  @Override
  public void update(GameContainer gameContainer, StateBasedGame stateBasedGame, int delta)
      throws SlickException {
    super.update(gameContainer, stateBasedGame, delta);

    // Ease the rotation toward the target and apply it to the UI elements.
    rotation += (targetrotation - rotation) / rotationEase;
    tandwiel1.setRotation(rotation);
    // Second gear counter-rotates with a fixed ratio and phase offset.
    tandwiel2.setRotation((float) ((float) -(rotation * 1.818181818181818) + 16.36363636363636));
    spinner.setRotation(rotation);

    if (frameGrabber != null && updateCamera) {
      updateCamera = false;
      // Grab the latest webcam frame and convert it to an AWT image.
      buffer = frameGrabber.grabFrame();
      awtFrame = new BufferToImage((VideoFormat) buffer.getFormat()).createImage(buffer);
      BufferedImage bufferedImage =
          new BufferedImage(
              awtFrame.getWidth(null), awtFrame.getHeight(null), BufferedImage.TYPE_INT_RGB);
      // Dispose the graphics context after drawing — createGraphics() allocates
      // a native resource that would otherwise leak on every camera update.
      java.awt.Graphics2D g = bufferedImage.createGraphics();
      try {
        g.drawImage(awtFrame, 0, 0, this);
      } finally {
        g.dispose();
      }
      baseImage = bufferedImage;
      try {
        texture = BufferedImageUtil.getTexture("", baseImage);
        webcamFeed.setTexture(texture);
      } catch (IOException e) {
        logger.error(e);
      }
    }
    calculatePulse();
  }
コード例 #3
0
  /**
   * Grabs the current frame from the player, saves it as a timestamped JPEG in the configured
   * save directory, and records the new image index in the rotation bookkeeping lists.
   */
  public void captureImage() {
    // Use the platform path separator instead of a hard-coded "\\" so this
    // also works outside Windows (identical result on Windows itself).
    String savepath =
        this.saveDirectory
            + java.io.File.separator
            + "cam"
            + this.getDateFormatNow("yyyyMMdd_HHmmss-S")
            + ".jpg";
    System.out.println("Capturing current image to " + savepath);

    // Grab a frame from the running player.
    FrameGrabbingControl fgc =
        (FrameGrabbingControl) player.getControl("javax.media.control.FrameGrabbingControl");
    buf = fgc.grabFrame();

    // Convert it to an AWT image.
    btoi = new BufferToImage((VideoFormat) buf.getFormat());
    img = btoi.createImage(buf);

    // Save the image to disk.
    saveJPG(img, savepath);

    // show the image
    // imgpanel.setImage(img);

    // images.add(img);
    images.add(savepath);

    // Keep the "last added" history bounded at lastadded_max entries.
    if (images_lastadded.size() >= lastadded_max) {
      images_lastadded.remove(images_lastadded.size() - 1);
    }

    // Newest image index goes to the front of both queues.
    images_lastadded.add(0, images.size() - 1);
    images_nevershown.add(0, images.size() - 1);

    forceNewImage();
  }
コード例 #4
0
  /**
   * Rebuilds the MemoryImageSource (and, when a component is attached, the destination image)
   * from the pixel data of the supplied buffer. Buffers whose data is not an {@code int[]} are
   * silently ignored.
   */
  private void newImage(Buffer buffer) {
    final Object pixels = buffer.getData();
    if (!(pixels instanceof int[])) {
      return;
    }
    final RGBFormat rgb = (RGBFormat) buffer.getFormat();

    // Color model mirroring the buffer's per-channel bit masks.
    final DirectColorModel colorModel =
        new DirectColorModel(
            rgb.getBitsPerPixel(), rgb.getRedMask(), rgb.getGreenMask(), rgb.getBlueMask());

    // The line stride is used as both the image width and the scan size,
    // matching the layout of the incoming pixel array.
    final int stride = rgb.getLineStride();
    sourceImage =
        new MemoryImageSource(
            stride, rgb.getSize().height, colorModel, (int[]) pixels, 0, stride);
    sourceImage.setAnimated(true);
    sourceImage.setFullBufferUpdates(true);

    if (component != null) {
      destImage = component.createImage(sourceImage);
      component.prepareImage(destImage, component);
    }
  }
コード例 #5
0
  /**
   * Blocks and reads into a <tt>Buffer</tt> from this <tt>PullBufferStream</tt>.
   *
   * @param buffer the <tt>Buffer</tt> this <tt>PullBufferStream</tt> is to read into
   * @throws IOException if an I/O error occurs while this <tt>PullBufferStream</tt> reads into the
   *     specified <tt>Buffer</tt>
   * @see AbstractVideoPullBufferStream#doRead(Buffer)
   */
  @Override
  protected void doRead(Buffer buffer) throws IOException {
    /*
     * Determine the Format in which we're expected to output. We cannot
     * rely on the Format always being specified in the Buffer because it is
     * not its responsibility, the DataSource of this ImageStream knows the
     * output Format.
     */
    Format format = buffer.getFormat();

    if (format == null) {
      // Fall back to this stream's own output Format and record it on the
      // Buffer so downstream consumers see it.
      format = getFormat();
      if (format != null) buffer.setFormat(format);
    }

    if (format instanceof AVFrameFormat) {
      // Native (AVFrame) output path: reuse the AVFrame already installed in
      // the Buffer if there is one, otherwise allocate and install a new one.
      Object o = buffer.getData();
      AVFrame frame;

      if (o instanceof AVFrame) frame = (AVFrame) o;
      else {
        frame = new AVFrame();
        buffer.setData(frame);
      }

      AVFrameFormat avFrameFormat = (AVFrameFormat) format;
      Dimension size = avFrameFormat.getSize();
      ByteBuffer data = readScreenNative(size);

      if (data != null) {
        if (frame.avpicture_fill(data, avFrameFormat) < 0) {
          // The grabbed buffer could not be attached to the frame; free it
          // before failing so the native memory is not leaked.
          data.free();
          throw new IOException("avpicture_fill");
        }
      } else {
        /*
         * This can happen when we disconnect a monitor from computer
         * before or during grabbing.
         */
        throw new IOException("Failed to grab screen.");
      }
    } else {
      // Byte-array output path: read the screen into (a possibly reallocated)
      // byte array and publish it on the Buffer.
      byte[] bytes = (byte[]) buffer.getData();
      Dimension size = ((VideoFormat) format).getSize();

      bytes = readScreen(bytes, size);

      buffer.setData(bytes);
      buffer.setOffset(0);
      buffer.setLength(bytes.length);
    }

    // Stamp the Buffer as live data with a system-clock time stamp and a
    // monotonically increasing sequence number.
    buffer.setHeader(null);
    buffer.setTimeStamp(System.nanoTime());
    buffer.setSequenceNumber(seqNo);
    buffer.setFlags(Buffer.FLAG_SYSTEM_TIME | Buffer.FLAG_LIVE_DATA);
    seqNo++;
  }
コード例 #6
0
  /** Processes the data and renders it to a component */
  public synchronized int process(Buffer buffer) {
    if (component == null) return BUFFER_PROCESSED_FAILED;

    Format inf = buffer.getFormat();
    if (inf == null) return BUFFER_PROCESSED_FAILED;

    if (inf != inputFormat || !buffer.getFormat().equals(inputFormat)) {
      if (setInputFormat(inf) != null) return BUFFER_PROCESSED_FAILED;
    }

    Object data = buffer.getData();
    if (!(data instanceof int[])) return BUFFER_PROCESSED_FAILED;

    if (lastBuffer != buffer) {
      lastBuffer = buffer;
      newImage(buffer);
    }

    sourceImage.newPixels(0, 0, inWidth, inHeight);

    Graphics g = component.getGraphics();
    if (g != null) {
      if (reqBounds == null) {
        bounds = component.getBounds();
        bounds.x = 0;
        bounds.y = 0;
      } else bounds = reqBounds;
      g.drawImage(
          destImage,
          bounds.x,
          bounds.y,
          bounds.width,
          bounds.height,
          0,
          0,
          inWidth,
          inHeight,
          component);
    }

    return BUFFER_PROCESSED_OK;
  }
コード例 #7
0
    @Override
    public void read(Buffer buffer) throws IOException {
      pbs.read(buffer);

      // Remap the time stamps so it won't wrap around
      // while changing to a new file.
      if (buffer.getTimeStamp() != Buffer.TIME_UNKNOWN) {
        long diff = buffer.getTimeStamp() - lastTS;
        lastTS = buffer.getTimeStamp();
        if (diff > 0) timeStamp += diff;
        buffer.setTimeStamp(timeStamp);
      }

      // If this track is to be used as the master time base,
      // we'll need to compute the master time based on this track.
      if (useAsMaster) {
        if (buffer.getFormat() instanceof AudioFormat) {
          AudioFormat af = (AudioFormat) buffer.getFormat();
          masterAudioLen += buffer.getLength();
          long t = af.computeDuration(masterAudioLen);
          if (t > 0) {
            masterTime = t;
          } else {
            masterTime = buffer.getTimeStamp();
          }
        } else {
          masterTime = buffer.getTimeStamp();
        }
      }

      if (buffer.isEOM()) {
        tInfo.done = true;
        if (!ds.handleEOM(tInfo)) {
          // This is not the last processor to be done.
          // We'll need to un-set the EOM flag.
          buffer.setEOM(false);
          buffer.setDiscard(true);
        }
      }
    }
コード例 #8
0
ファイル: ImageIODecoder.java プロジェクト: MrSampson/fmj
  /**
   * Decodes one encoded image frame from {@code input} via ImageIO and publishes the decoded
   * pixels (data, offset, length, format) on {@code output}.
   *
   * @param input the buffer holding the encoded image bytes
   * @param output the buffer to receive the decoded frame
   * @return {@code BUFFER_PROCESSED_OK} on success (or EOM propagation), otherwise
   *     {@code BUFFER_PROCESSED_FAILED} with {@code output} marked discarded
   */
  @Override
  public int process(Buffer input, Buffer output) {
    if (!checkInputBuffer(input)) {
      return BUFFER_PROCESSED_FAILED;
    }

    if (isEOM(input)) {
      propagateEOM(output); // TODO: what about data? can there be any?
      return BUFFER_PROCESSED_OK;
    }

    try {
      // TODO: this is very inefficient - it allocates a new byte array
      // (or more) every time
      final ByteArrayInputStream is =
          new ByteArrayInputStream((byte[]) input.getData(), input.getOffset(), input.getLength());
      final BufferedImage image = ImageIO.read(is);
      is.close();

      // ImageIO.read returns null (it does not throw) when no registered
      // reader understands the stream; fail the buffer rather than NPE below.
      if (image == null) {
        output.setDiscard(true);
        output.setLength(0);
        return BUFFER_PROCESSED_FAILED;
      }

      final Buffer b =
          ImageToBuffer.createBuffer(image, ((VideoFormat) outputFormat).getFrameRate());

      output.setData(b.getData());
      output.setOffset(b.getOffset());
      output.setLength(b.getLength());
      output.setFormat(b.getFormat()); // TODO: this is a bit hacky, this
      // format will be more specific
      // than the actual set output
      // format, because now we know what
      // ImageIO gave us for a
      // BufferedImage as far as pixel
      // masks, etc.

      return BUFFER_PROCESSED_OK;

    } catch (IOException e) {
      output.setDiscard(true);
      output.setLength(0);
      return BUFFER_PROCESSED_FAILED;
    }
  }
コード例 #9
0
ファイル: JAIEffect.java プロジェクト: elecnix/jmf
  /**
   * Applies the currently selected JAI 3x3 convolution effect to the incoming video frame and
   * writes the result to {@code outBuffer}. With the "None" effect (or no control), behavior
   * varies: "None" passes the input through untouched.
   *
   * @param inBuffer the incoming video frame
   * @param outBuffer the buffer to receive the filtered frame
   * @return {@code BUFFER_PROCESSED_OK} on success, else {@code BUFFER_PROCESSED_FAILED}
   */
  public int process(Buffer inBuffer, Buffer outBuffer) {
    try {
      // Lazily build the Buffer->Image converter from the first frame's format.
      // NOTE(review): the converter is never rebuilt, so a mid-stream format
      // change would not be picked up — confirm upstream format is fixed.
      if (frameConverter == null) {
        frameConverter = new BufferToImage((VideoFormat) inBuffer.getFormat());
      }

      // Convert the Buffer to an AWT Image.
      Image frameImage = frameConverter.createImage(inBuffer);

      // Derive a JAI image from the AWT image.
      PlanarImage jaiImage = JAI.create("AWTImage", frameImage);

      int index;
      boolean emboss = false;
      if (control != null) {
        index = control.getEffectIndex();
        // "None": pass the input buffer through without filtering.
        if (control.getEffectName().equals("None")) {
          outBuffer.setData(inBuffer.getData());
          outBuffer.setFormat(inBuffer.getFormat());
          outBuffer.setFlags(inBuffer.getFlags());
          outBuffer.setLength(inBuffer.getLength());
          return BUFFER_PROCESSED_OK;
        }
        if (control.getEffectName().equals("Emboss")) {
          emboss = true; // Special case
        }
      } else index = 0;

      // Kernels are built lazily from the matrices table and cached.
      if (kernels[index] == null) {
        kernels[index] = new KernelJAI(3, 3, matrices[index]);
      }

      jaiImage = JAI.create("convolve", jaiImage, kernels[index]);

      if (emboss) { // add 128 to make it brighter
        double[] constants = new double[] {128., 128., 128.};
        ParameterBlock pb = new ParameterBlock();
        pb.addSource(jaiImage);
        pb.add(constants);
        jaiImage = JAI.create("addconst", pb, null);
      }

      // Now convert the image to a buffer
      BufferedImage bim = jaiImage.getAsBufferedImage();

      // 15 fps is assumed for the output frame rate here.
      Buffer out = ImageToBuffer.createBuffer(bim, 15.F);
      if (out == null) {
        if (debug) {
          System.out.println("ImageToBuffer returned null");
        }
        return BUFFER_PROCESSED_FAILED;
      }

      outBuffer.setData(out.getData());
      outBuffer.setFormat(out.getFormat());
      outBuffer.setFlags(out.getFlags());
      outBuffer.setLength(out.getLength());
    } catch (Exception e) {
      System.err.println(e);
      return BUFFER_PROCESSED_FAILED;
    } catch (Error e) {
      // Errors (e.g. from native JAI operators) are deliberately caught so a
      // bad frame fails this call instead of killing the processing thread.
      System.err.println(e);
      return BUFFER_PROCESSED_FAILED;
    }
    return BUFFER_PROCESSED_OK;
  }
コード例 #10
0
  /**
   * Writes the buffer's audio data to the SourceDataLine, first converting it with the codec
   * when one is configured.
   *
   * @param buffer the audio buffer to render; its data must be a {@code byte[]}
   * @return {@code BUFFER_PROCESSED_OK} when fully written, {@code INPUT_BUFFER_NOT_CONSUMED}
   *     when (in non-blocking mode) only part of the data fit the line,
   *     {@code BUFFER_PROCESSED_FAILED} on codec failure, wrong data type, or a failed write
   */
  public int process(Buffer buffer) {

    // if we need to convert the format, do so using the codec.
    if (codec != null) {
      final int codecResult = codec.process(buffer, codecBuffer);
      if (codecResult == BUFFER_PROCESSED_FAILED) return BUFFER_PROCESSED_FAILED;
      if (codecResult == OUTPUT_BUFFER_NOT_FILLED) return BUFFER_PROCESSED_OK;
      buffer = codecBuffer;
    }

    int length = buffer.getLength();
    int offset = buffer.getOffset();

    final Format format = buffer.getFormat();

    // Only byte-array data can be written to a SourceDataLine.
    final Class type = format.getDataType();
    if (type != Format.byteArray) {
      return BUFFER_PROCESSED_FAILED;
    }

    final byte[] data = (byte[]) buffer.getData();

    final boolean bufferNotConsumed;
    final int newBufferLength; // only applicable if bufferNotConsumed
    final int newBufferOffset; // only applicable if bufferNotConsumed

    if (NON_BLOCKING) {
      // TODO: handle sourceLine.available().  This code currently causes choppy audio.

      if (length > sourceLine.available()) {
        // we should only write sourceLine.available() bytes, then return INPUT_BUFFER_NOT_CONSUMED.
        length = sourceLine.available(); // don't try to write more than available
        bufferNotConsumed = true;
        // Remember where the unwritten remainder starts for the caller.
        newBufferLength = buffer.getLength() - length;
        newBufferOffset = buffer.getOffset() + length;

      } else {
        bufferNotConsumed = false;
        newBufferLength = length;
        newBufferOffset = offset;
      }
    } else {
      bufferNotConsumed = false;
      newBufferLength = 0;
      newBufferOffset = 0;
    }

    if (length == 0) {
      logger.finer("Buffer has zero length, flags = " + buffer.getFlags());
    }

    // make sure all the bytes are written.
    while (length > 0) {

      // logger.fine("Available: " + sourceLine.available());
      // logger.fine("length: " + length);
      // logger.fine("sourceLine.getBufferSize(): " + sourceLine.getBufferSize());

      final int n =
          sourceLine.write(
              data, offset, length); // TODO: this can block for a very long time if it doesn't
      if (n >= length) break;
      else if (n == 0) {
        // TODO: we could choose to handle a write failure this way,
        // assuming that it is considered legal to call stop while process is being called.
        // however, that seems like a bad idea in general.
        //				if (!sourceLine.isRunning())
        //				{
        //					buffer.setLength(offset);
        //					buffer.setOffset(length);
        //					return INPUT_BUFFER_NOT_CONSUMED;	// our write was interrupted.
        //				}

        logger.warning(
            "sourceLine.write returned 0, offset="
                + offset
                + "; length="
                + length
                + "; available="
                + sourceLine.available()
                + "; frame size in bytes"
                + sourceLine.getFormat().getFrameSize()
                + "; sourceLine.isActive() = "
                + sourceLine.isActive()
                + "; "
                + sourceLine.isOpen()
                + "; sourceLine.isRunning()="
                + sourceLine.isRunning());
        return BUFFER_PROCESSED_FAILED; // sourceLine.write docs indicate that this will only happen
                                        // if there is an error.

      } else {
        // Partial write: advance past the bytes that were accepted and retry.
        offset += n;
        length -= n;
      }
    }

    if (bufferNotConsumed) {
      // return INPUT_BUFFER_NOT_CONSUMED if not all bytes were written

      buffer.setLength(newBufferLength);
      buffer.setOffset(newBufferOffset);
      return INPUT_BUFFER_NOT_CONSUMED;
    }

    if (buffer.isEOM()) {
      // TODO: the proper way to do this is to implement Drainable, and let the processor call our
      // drain method.
      sourceLine
          .drain(); // we need to ensure that the media finishes playing, otherwise the EOM event
                    // will
      // be posted before the media finishes playing.
    }

    return BUFFER_PROCESSED_OK;
  }