Code Example #1
 public List<VideoFormat> enumVideoFormats() {
   // Lazily build and cache the list of supported formats.
   if (videoFormats != null) return videoFormats;
   videoFormats = new ArrayList<VideoFormat>();
   videoFormats.add(
       new VideoFormatImpl(
           VideoFormat.RGB32,
           cameraImageSize.getWidth(),
           cameraImageSize.getHeight(),
           VideoFormat.FPS_UNKNOWN));
   // just for fun, add one at quarter size:
   videoFormats.add(
       new VideoFormatImpl(
           VideoFormat.RGB32,
           cameraImageSize.getWidth() / 2,
           cameraImageSize.getHeight() / 2,
           VideoFormat.FPS_UNKNOWN));
   return videoFormats;
 }
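
For context, a caller would typically pick one of the returned formats before starting capture. A minimal usage sketch, where stream is a hypothetical variable standing in for an instance of the enclosing class, and getWidth()/getHeight() are assumed accessors on VideoFormat:

 // Hypothetical usage; "stream" is an instance of the enclosing capture class.
 for (VideoFormat f : stream.enumVideoFormats()) {
   System.out.println("Supported format: " + f.getWidth() + "x" + f.getHeight());
 }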
Code Example #2
  private BufferedImage allocBufferedImage(byte[] bytes) {

    // Set up the buffered image.
    // bytesPerRow may be larger than needed for a single row; for example, a Canon DV
    // camera with 720 pixels across may have enough space for 724 pixels.
    final int bytesPerRow = gWorld.getPixMap().getPixelData().getRowBytes();

    // Using a byte[] instead of an int[] is more compatible with the allowed CIVIL
    // output formats (always byte[]).

    final int w = cameraImageSize.getWidth();
    final int h = cameraImageSize.getHeight();

    // TODO: we don't need alpha...
    final DataBufferByte db = new DataBufferByte(new byte[][] {bytes}, bytes.length);

    final ComponentSampleModel sm =
        new ComponentSampleModel(
            DataBuffer.TYPE_BYTE,
            w,
            h,
            4,
            bytesPerRow,
            // bigEndian ? ARGB : ABGR
            bigEndian ? new int[] {1, 2, 3, 0} : new int[] {3, 2, 1, 0});
    final WritableRaster r = Raster.createWritableRaster(sm, db, new Point(0, 0));
    // construction borrowed from BufferedImage constructor, for BufferedImage.TYPE_4BYTE_ABGR
    final ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
    int[] nBits = {8, 8, 8, 8};
    // int[] bOffs = {3, 2, 1, 0};
    final ColorModel colorModel =
        new ComponentColorModel(
            cs, nBits, true, false, Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE);
    return new BufferedImage(colorModel, r, false, null);
  }
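
The same Raster / SampleModel / ColorModel wiring can be exercised without QuickTime. Below is a self-contained sketch that wraps a padded ABGR byte array in a BufferedImage using only java.awt.image; the 4x3 size, the extra row padding, and the red fill are illustrative assumptions, not values from the original class.

import java.awt.Point;
import java.awt.Transparency;
import java.awt.color.ColorSpace;
import java.awt.image.*;

public class AbgrWrapDemo {
  public static void main(String[] args) {
    final int w = 4, h = 3;
    final int bytesPerRow = w * 4 + 8; // deliberately padded row stride
    final byte[] bytes = new byte[bytesPerRow * h];
    // Fill every pixel with opaque red, stored as A, B, G, R bytes.
    for (int y = 0; y < h; y++) {
      for (int x = 0; x < w; x++) {
        int i = y * bytesPerRow + x * 4;
        bytes[i] = (byte) 0xFF;     // A
        bytes[i + 3] = (byte) 0xFF; // R (B and G stay 0)
      }
    }
    final DataBufferByte db = new DataBufferByte(new byte[][] {bytes}, bytes.length);
    final ComponentSampleModel sm =
        new ComponentSampleModel(
            DataBuffer.TYPE_BYTE, w, h, 4, bytesPerRow,
            new int[] {3, 2, 1, 0}); // band offsets for R, G, B, A
    final WritableRaster raster = Raster.createWritableRaster(sm, db, new Point(0, 0));
    final ColorModel cm =
        new ComponentColorModel(
            ColorSpace.getInstance(ColorSpace.CS_sRGB),
            new int[] {8, 8, 8, 8},
            true, false, Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE);
    final BufferedImage img = new BufferedImage(cm, raster, false, null);
    System.out.println(Integer.toHexString(img.getRGB(0, 0))); // ffff0000 = opaque red
  }
}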
Code Example #3
  private void initSequenceGrabber() throws QTException {
    if (sequenceGrabberInitialized) return;
    QTSession.open();

    sg = new SequenceGrabber();

    vc = new SGVideoChannel(sg);
    // cameraImageSize = new QDRect(320, 240);
    if (overrideVideoFormat != null) {
      cameraImageSize =
          new QDRect(overrideVideoFormat.getWidth(), overrideVideoFormat.getHeight());
    } else {
      cameraImageSize = vc.getSrcVideoBounds();
      logger.info(
          "Camera image size reported as: "
              + cameraImageSize.getWidth()
              + "x"
              + cameraImageSize.getHeight());

      // This is a workaround found at
      // http://rsb.info.nih.gov/ij/plugins/download/QuickTime_Capture.java
      // and other places for the iSight, which reports the wrong resolution.
      // TODO: find a better way of identifying the iSight.
      Dimension screen = Toolkit.getDefaultToolkit().getScreenSize();
      if (cameraImageSize.getHeight() > screen.height - 40) // iSight claims to be 1600x1200!
      {
        logger.warning(
            "Camera image size reported as: "
                + cameraImageSize.getWidth()
                + "x"
                + cameraImageSize.getHeight()
                + "; resizing to 640x480");
        cameraImageSize.resize(640, 480);
      }
    }
    // On PPC (big endian) we use: k32ARGBPixelFormat
    // On Intel we use: k32ABGRPixelFormat
    // fails on PPC with DepthErrInvalid: k32ABGRPixelFormat, k32BGRAPixelFormat, k32RGBAPixelFormat
    gWorld =
        new QDGraphics(
            bigEndian ? QDConstants.k32ARGBPixelFormat : QDConstants.k32ABGRPixelFormat,
            cameraImageSize); // set a specific pixel format so we can predictably convert to
                              // buffered image below.
    sg.setGWorld(gWorld, null);
    vc.setBounds(cameraImageSize);
    vc.setUsage(quicktime.std.StdQTConstants.seqGrabRecord);
    vc.setFrameRate(0);
    myCodec = quicktime.std.StdQTConstants.kComponentVideoCodecType;
    vc.setCompressorType(myCodec);
    sequenceGrabberInitialized = true;
  }
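
The bigEndian field used in Code Examples #2 and #3 is not shown in these snippets. A minimal sketch of one way it might be initialized, using java.nio.ByteOrder; this is an assumption, and the original class may detect the platform differently (e.g. via the os.arch system property):

import java.nio.ByteOrder;

// Assumed initialization: true on PPC (big endian), false on Intel.
private final boolean bigEndian = ByteOrder.nativeOrder() == ByteOrder.BIG_ENDIAN;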
Code Example #4
 private byte[] allocPixelData() {
   // final int size = gWorld.getPixMap().getPixelData().getSize();
   // rowBytes already includes any per-row padding, so size the buffer as
   // (rowBytes * height) rather than (width * 4 * height).
   final int intsPerRow = gWorld.getPixMap().getPixelData().getRowBytes() / 4;
   final int size = intsPerRow * cameraImageSize.getHeight();
   return new byte[size * 4];
 }
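
To make the padding concrete with the figures from Code Example #2: a Canon DV frame 720 pixels across that is stored with room for 724 pixels has rowBytes = 724 * 4 = 2896 per row, while the visible pixels only need 720 * 4 = 2880 bytes; allocPixelData() sizes the buffer from the larger, padded figure so every captured row fits.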