Example #1
 /**
  * Encode the uncompressed source image stored in <code>srcImage</code> and output a YUV planar
  * image to the given destination buffer. See {@link #encodeYUV(byte[], int)} for more detail.
  *
  * @param srcImage a <code>BufferedImage</code> instance containing RGB or grayscale pixels to be
  *     encoded
  * @param dstBuf buffer that will receive the YUV planar image. Use {@link TJ#bufSizeYUV} to
  *     determine the appropriate size for this buffer based on the image width, height, and level
  *     of chrominance subsampling.
  * @param flags the bitwise OR of one or more of {@link TJ TJ.FLAG_*}
  */
 public void encodeYUV(BufferedImage srcImage, byte[] dstBuf, int flags) throws Exception {
   if (srcImage == null || dstBuf == null || flags < 0)
     throw new Exception("Invalid argument in encodeYUV()");
   int width = srcImage.getWidth();
   int height = srcImage.getHeight();
   int pixelFormat;
   boolean intPixels = false;
   if (byteOrder == null) byteOrder = ByteOrder.nativeOrder();
   switch (srcImage.getType()) {
     case BufferedImage.TYPE_3BYTE_BGR:
       pixelFormat = TJ.PF_BGR;
       break;
     case BufferedImage.TYPE_4BYTE_ABGR:
     case BufferedImage.TYPE_4BYTE_ABGR_PRE:
       pixelFormat = TJ.PF_XBGR;
       break;
     case BufferedImage.TYPE_BYTE_GRAY:
       pixelFormat = TJ.PF_GRAY;
       break;
     case BufferedImage.TYPE_INT_BGR:
       if (byteOrder == ByteOrder.BIG_ENDIAN) pixelFormat = TJ.PF_XBGR;
       else pixelFormat = TJ.PF_RGBX;
       intPixels = true;
       break;
     case BufferedImage.TYPE_INT_RGB:
     case BufferedImage.TYPE_INT_ARGB:
     case BufferedImage.TYPE_INT_ARGB_PRE:
       if (byteOrder == ByteOrder.BIG_ENDIAN) pixelFormat = TJ.PF_XRGB;
       else pixelFormat = TJ.PF_BGRX;
       intPixels = true;
       break;
     default:
       throw new Exception("Unsupported BufferedImage format");
   }
   WritableRaster wr = srcImage.getRaster();
   if (subsamp < 0) throw new Exception("Subsampling level not set");
   if (intPixels) {
     SinglePixelPackedSampleModel sm = (SinglePixelPackedSampleModel) srcImage.getSampleModel();
     int pitch = sm.getScanlineStride();
     DataBufferInt db = (DataBufferInt) wr.getDataBuffer();
     int[] buf = db.getData();
     encodeYUV(buf, width, pitch, height, pixelFormat, dstBuf, subsamp, flags);
   } else {
     ComponentSampleModel sm = (ComponentSampleModel) srcImage.getSampleModel();
     int pixelSize = sm.getPixelStride();
     if (pixelSize != TJ.getPixelSize(pixelFormat))
       throw new Exception("Inconsistency between pixel format and pixel size in BufferedImage");
     int pitch = sm.getScanlineStride();
     DataBufferByte db = (DataBufferByte) wr.getDataBuffer();
     byte[] buf = db.getData();
     encodeYUV(buf, width, pitch, height, pixelFormat, dstBuf, subsamp, flags);
   }
   compressedSize = TJ.bufSizeYUV(width, height, subsamp);
 }
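A minimal caller sketch for the method above, assuming it lives on a TurboJPEG compressor wrapper (here called compressor) whose subsampling level has already been set; TJ.bufSizeYUV and TJ.SAMP_420 come from the TurboJPEG Java API, everything else is illustrative:

  BufferedImage srcImage = ImageIO.read(new File("input.png"));
  // Size the YUV destination buffer exactly as the javadoc above recommends.
  byte[] dstBuf =
      new byte[TJ.bufSizeYUV(srcImage.getWidth(), srcImage.getHeight(), TJ.SAMP_420)];
  compressor.encodeYUV(srcImage, dstBuf, 0); // "compressor" is a hypothetical wrapper instance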
Example #2
 /**
  * Create or update the preview image from the given banded float image data.
  *
  * @param image_data the per-band pixel data, one float array per band
  */
 private void createBufferedImage(float[][] image_data) {
   WritableRaster raster = null;
   int num_bands = 0;
   if (null != preview_image) {
      num_bands = preview_image.getSampleModel().getNumBands();
   }
   if ((null == preview_image) || (image_data.length != num_bands)) {
     if (image_data.length == 1) {
       preview_image =
           new BufferedImage(subSampledPixels, subSampledScans, BufferedImage.TYPE_BYTE_GRAY);
     } else {
       preview_image =
           new BufferedImage(subSampledPixels, subSampledScans, BufferedImage.TYPE_3BYTE_BGR);
     }
      // One bank per band; the size argument is the number of samples per bank.
      DataBufferFloat dbuf = new DataBufferFloat(image_data, subSampledPixels * subSampledScans);
      SampleModel sampleModel =
          new BandedSampleModel(
              DataBuffer.TYPE_FLOAT, subSampledPixels, subSampledScans, image_data.length);
     raster = Raster.createWritableRaster(sampleModel, dbuf, new Point(0, 0));
     preview_image.setData(raster);
   } else if (1 == num_bands) {
     preview_image
         .getRaster()
         .setDataElements(
             0, 0, preview_image.getWidth(), preview_image.getHeight(), image_data[0]);
   } else if (3 == num_bands) {
     preview_image
         .getRaster()
         .setDataElements(0, 0, preview_image.getWidth(), preview_image.getHeight(), image_data);
   }
 }
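A standalone sketch of the banded-float raster construction used above, with illustrative sizes (all classes from java.awt.image):

  int w = 4, h = 3, bands = 3;
  float[][] data = new float[bands][w * h];                 // one bank per band
  DataBufferFloat dbuf = new DataBufferFloat(data, w * h);  // size is per bank
  SampleModel sm = new BandedSampleModel(DataBuffer.TYPE_FLOAT, w, h, bands);
  WritableRaster raster = Raster.createWritableRaster(sm, dbuf, new Point(0, 0));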
 @Test
 public void testTranslatedImage() throws Exception {
   BufferedImage bi = new BufferedImage(256, 256, BufferedImage.TYPE_BYTE_GRAY);
   TiledImage image =
       new TiledImage(
           0,
           0,
           256,
           256,
           1,
           1,
           bi.getSampleModel().createCompatibleSampleModel(256, 256),
           bi.getColorModel());
   Graphics g = image.createGraphics();
   g.setColor(Color.WHITE);
   g.fillRect(0, 0, 20, 20);
   g.setColor(new Color(20, 20, 20)); // A dark gray
   g.fillRect(20, 20, 20, 20);
   g.setColor(new Color(200, 200, 200)); // A light gray
   g.fillRect(0, 20, 20, 20);
   g.dispose();
   RenderedImage indexed = quantize(image);
   assertTrue(indexed.getColorModel() instanceof IndexColorModel);
   IndexColorModel icm = (IndexColorModel) indexed.getColorModel();
    // Black background, white fill, light gray fill, dark gray fill = 4 colors
    assertEquals(4, icm.getMapSize());
 }
Example #4
 /**
  * Basic nonparametric usage of canny edge detector. No thresholding is used.
  *
  * @param original input image
  */
 public CCannyEdgeDetector(BufferedImage original) {
   size = new Dimension(original.getWidth(), original.getHeight());
   input = original.getData();
   image =
       new BufferedImage(
           (int) size.getWidth(), (int) size.getHeight(), BufferedImage.TYPE_INT_RGB);
   bands = original.getSampleModel().getNumBands();
 }
Example #5
  protected INDArray toINDArrayBGR(BufferedImage image) {
    int width = image.getWidth();
    int height = image.getHeight();
    int bands = image.getSampleModel().getNumBands();

    byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();
    int[] shape = new int[] {height, width, bands};
    INDArray ret = Nd4j.create(new ImageByteBuffer(pixels, pixels.length), shape);
    return ret.permute(2, 0, 1);
  }
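A hypothetical caller sketch for the conversion above; permute(2, 0, 1) turns the height-width-channels layout into channels-height-width, and "loader" stands in for whatever object exposes toINDArrayBGR:

  BufferedImage img = ImageIO.read(new File("sample.jpg")); // typically TYPE_3BYTE_BGR for JPEGs
  INDArray chw = loader.toINDArrayBGR(img);                 // shape [bands, height, width]
  System.out.println(Arrays.toString(chw.shape()));         // e.g. [3, 32, 64] for a 64x32 image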
Example #6
 private static void initImg(BufferedImage img, int pf, int flags) throws Exception {
   WritableRaster wr = img.getRaster();
   int imgType = img.getType();
   if (imgType == BufferedImage.TYPE_INT_RGB
       || imgType == BufferedImage.TYPE_INT_BGR
       || imgType == BufferedImage.TYPE_INT_ARGB
       || imgType == BufferedImage.TYPE_INT_ARGB_PRE) {
     SinglePixelPackedSampleModel sm = (SinglePixelPackedSampleModel) img.getSampleModel();
     int pitch = sm.getScanlineStride();
     DataBufferInt db = (DataBufferInt) wr.getDataBuffer();
     int[] buf = db.getData();
     initIntBuf(buf, img.getWidth(), pitch, img.getHeight(), pf, flags);
   } else {
     ComponentSampleModel sm = (ComponentSampleModel) img.getSampleModel();
     int pitch = sm.getScanlineStride();
     DataBufferByte db = (DataBufferByte) wr.getDataBuffer();
     byte[] buf = db.getData();
     initBuf(buf, img.getWidth(), pitch, img.getHeight(), pf, flags);
   }
 }
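A quick check of which branch initImg takes: the packed-int BufferedImage types carry a SinglePixelPackedSampleModel over a DataBufferInt, while the byte-based types carry a ComponentSampleModel over a DataBufferByte.

  BufferedImage intImg = new BufferedImage(8, 8, BufferedImage.TYPE_INT_RGB);
  BufferedImage byteImg = new BufferedImage(8, 8, BufferedImage.TYPE_3BYTE_BGR);
  System.out.println(intImg.getSampleModel() instanceof SinglePixelPackedSampleModel); // true
  System.out.println(byteImg.getSampleModel() instanceof ComponentSampleModel);        // true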
  /**
   * Construct a JPEGImage.
   *
   * @param stream The JPEG InputStream.
   */
  public JPEGImage(InputStream stream) {
    com.sun.image.codec.jpeg.JPEGImageDecoder decoder =
        com.sun.image.codec.jpeg.JPEGCodec.createJPEGDecoder(stream);
    try {
      // decodeAsBufferedImage performs default color conversions
      image = decoder.decodeAsBufferedImage();
    } catch (ImageFormatException e) {
      throw new RuntimeException(JaiI18N.getString("JPEGImageDecoder1"));
    } catch (IOException e) {
      throw new RuntimeException(JaiI18N.getString("JPEGImageDecoder2"));
    }

    minX = 0;
    minY = 0;
    tileWidth = width = image.getWidth();
    tileHeight = height = image.getHeight();

    // Force image to have a ComponentSampleModel
    // since SinglePixelPackedSampleModels are not working
    if (!(image.getSampleModel() instanceof ComponentSampleModel)) {
      int type = -1;
      int numBands = image.getSampleModel().getNumBands();
      if (numBands == 1) {
        type = BufferedImage.TYPE_BYTE_GRAY;
      } else if (numBands == 3) {
        type = BufferedImage.TYPE_3BYTE_BGR;
      } else if (numBands == 4) {
        type = BufferedImage.TYPE_4BYTE_ABGR;
      } else {
        throw new RuntimeException(JaiI18N.getString("JPEGImageDecoder3"));
      }

      BufferedImage bi = new BufferedImage(width, height, type);
      Graphics2D g = bi.createGraphics();
      g.drawRenderedImage(image, new AffineTransform());
      image = bi;
    }

    sampleModel = image.getSampleModel();
    colorModel = image.getColorModel();
  }
  @Test
  public void test4BitPNG() throws Exception {

    // create test image
    IndexColorModel icm =
        new IndexColorModel(
            4,
            16,
            new byte[] {(byte) 255, 0, 0, 0, 16, 32, 64, (byte) 128, 1, 2, 3, 4, 5, 6, 7, 8},
            new byte[] {0, (byte) 255, 0, 0, 16, 32, 64, (byte) 128, 1, 2, 3, 4, 5, 6, 7, 8},
            new byte[] {0, 0, (byte) 255, 0, 16, 32, 64, (byte) 128, 1, 2, 3, 4, 5, 6, 7, 8});
    assertEquals(16, icm.getMapSize());

    // create random data
    WritableRaster data =
        com.sun.media.jai.codecimpl.util.RasterFactory.createWritableRaster(
            icm.createCompatibleSampleModel(32, 32), new Point(0, 0));
    for (int x = data.getMinX(); x < data.getMinX() + data.getWidth(); x++) {
      for (int y = data.getMinY(); y < data.getMinY() + data.getHeight(); y++) {
        data.setSample(x, y, 0, (x + y) % 8);
      }
    }

    final BufferedImage bi = new BufferedImage(icm, data, false, null);
    assertEquals(16, ((IndexColorModel) bi.getColorModel()).getMapSize());
    assertEquals(4, bi.getSampleModel().getSampleSize(0));
    bi.setData(data);
    if (TestData.isInteractiveTest()) {
      ImageIOUtilities.visualize(bi, "before");
    }

    // encode as png
    ImageWorker worker = new ImageWorker(bi);
    final File outFile = TestData.temp(this, "temp4.png");
    worker.writePNG(outFile, "FILTERED", 0.75f, true, false);
    worker.dispose();

    // make sure we can read it
    BufferedImage back = ImageIO.read(outFile);

    // we expect an IndexColorModel matching the old one
    IndexColorModel ccm = (IndexColorModel) back.getColorModel();
    assertEquals(3, ccm.getNumColorComponents());
    assertEquals(16, ccm.getMapSize());
    assertEquals(4, ccm.getPixelSize());
    if (TestData.isInteractiveTest()) {
      ImageIOUtilities.visualize(back, "after");
    }
  }
  @Test
  public void testRoundTripTiledImage() throws Exception {
    BufferedImage input = ImageIO.read(sourceFile);

    // prepare a tiled image layout
    ImageLayout il = new ImageLayout(input);
    il.setTileWidth(8);
    il.setTileHeight(8);

    RenderingHints hints = new RenderingHints(JAI.KEY_IMAGE_LAYOUT, il);
    RenderedOp tiled = FormatDescriptor.create(input, input.getSampleModel().getDataType(), hints);
    assertEquals(8, tiled.getTileWidth());
    assertEquals(8, tiled.getTileHeight());

    roundTripPNGJ(input, tiled);
  }
  public void testCreateBufferedImage() {
    ImageTypeSpecifier typeSpecifier =
        ImageTypeSpecifier.createGrayscale(8, DataBuffer.TYPE_BYTE, true);

    int width = 10;
    int height = 10;
    BufferedImage image = typeSpecifier.createBufferedImage(width, height);
    assertEquals(
        "Failed to create with the correct ColorModel",
        typeSpecifier.getColorModel(),
        image.getColorModel());
    assertEquals(
        "Failed to create with the correct SampleModel",
        typeSpecifier.getSampleModel().getClass(),
        image.getSampleModel().getClass());
    assertEquals("Failed to create with the correct width", width, image.getWidth());
    assertEquals("Failed to create with the correct height", height, image.getHeight());
  }
Example #11
  public void encodeWBMP(RenderedImage renderedImage, OutputStream os) throws IOException {

    BufferedImage bufferedImage = getBufferedImage(renderedImage);

    SampleModel sampleModel = bufferedImage.getSampleModel();

    int type = sampleModel.getDataType();

    if ((bufferedImage.getType() != BufferedImage.TYPE_BYTE_BINARY)
        || (type < DataBuffer.TYPE_BYTE)
        || (type > DataBuffer.TYPE_INT)
        || (sampleModel.getNumBands() != 1)
        || (sampleModel.getSampleSize(0) != 1)) {

      BufferedImage binaryImage =
          new BufferedImage(
              bufferedImage.getWidth(), bufferedImage.getHeight(), BufferedImage.TYPE_BYTE_BINARY);

      Graphics graphics = binaryImage.getGraphics();

      graphics.drawImage(bufferedImage, 0, 0, null);

      renderedImage = binaryImage;
    }

    if (!ImageIO.write(renderedImage, "wbmp", os)) {

      // See http://www.jguru.com/faq/view.jsp?EID=127723

      os.write(0);
      os.write(0);
      os.write(toMultiByte(bufferedImage.getWidth()));
      os.write(toMultiByte(bufferedImage.getHeight()));

      DataBuffer dataBuffer = bufferedImage.getData().getDataBuffer();

      int size = dataBuffer.getSize();

      for (int i = 0; i < size; i++) {
        os.write((byte) dataBuffer.getElem(i));
      }
    }
  }
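The fallback path writes the WBMP header by hand. The toMultiByte helper is not shown here, so the following is only a sketch of the base-128, continuation-bit encoding the WBMP header format requires (the WBMP decoder in a later example reads the same encoding back):

  // Hypothetical re-implementation; the real helper is defined elsewhere in the class.
  private static byte[] toMultiByte(int value) {
    int groups = 1; // number of 7-bit groups needed
    for (int v = value >>> 7; v != 0; v >>>= 7) groups++;
    byte[] out = new byte[groups];
    for (int i = groups - 1; i >= 0; i--) { // least-significant group goes last
      out[i] = (byte) (value & 0x7f);
      value >>>= 7;
    }
    for (int i = 0; i < groups - 1; i++) out[i] |= (byte) 0x80; // continuation bit on all but the last byte
    return out;
  }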
Example #12
  /**
   * Load a rastered image from file
   *
   * @param file the file to load
   * @return the rastered image
   * @throws IOException
   */
  public int[][][] fromFileMultipleChannels(File file) throws IOException {
    BufferedImage image = ImageIO.read(file);
    image = scalingIfNeed(image, channels > 3);

    int w = image.getWidth(), h = image.getHeight();
    int bands = image.getSampleModel().getNumBands();
    int[][][] ret = new int[channels][h][w];
    byte[] pixels = ((DataBufferByte) image.getRaster().getDataBuffer()).getData();

    for (int i = 0; i < h; i++) {
      for (int j = 0; j < w; j++) {
        for (int k = 0; k < channels; k++) {
          if (k >= bands) break;
          ret[k][i][j] = pixels[channels * w * i + channels * j + k];
        }
      }
    }
    return ret;
  }
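For reference, a worked example of the interleaved-index arithmetic in the inner loop above (assuming the pixel buffer really is channels-interleaved, which scalingIfNeed is presumably meant to guarantee):

  // For w = 4 and channels = 3, band k = 1 of the pixel at row i = 1, column j = 2
  // sits at channels * w * i + channels * j + k = 3*4*1 + 3*2 + 1 = 19.
  int w = 4, channels = 3, i = 1, j = 2, k = 1;
  int index = channels * w * i + channels * j + k; // 19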
Example #13
  public RenderedImage decodeAsRenderedImage(int page) throws IOException {
    if (page != 0) {
      throw new IOException(JaiI18N.getString("WBMPImageDecoder0"));
    }

    input.read(); // TypeField
    input.read(); // FixHeaderField

    // Image width
    int value = input.read();
    int width = value & 0x7f;
    while ((value & 0x80) == 0x80) {
      width <<= 7;
      value = input.read();
      width |= (value & 0x7f);
    }

    // Image height
    value = input.read();
    int height = value & 0x7f;
    while ((value & 0x80) == 0x80) {
      height <<= 7;
      value = input.read();
      height |= (value & 0x7f);
    }

    // Create a byte-packed bilevel image with an IndexColorModel
    BufferedImage bi = new BufferedImage(width, height, BufferedImage.TYPE_BYTE_BINARY);

    // Get the image tile.
    WritableRaster tile = bi.getWritableTile(0, 0);

    // Get the SampleModel.
    MultiPixelPackedSampleModel sm = (MultiPixelPackedSampleModel) bi.getSampleModel();

    // Read the data.
    input.readFully(
        ((DataBufferByte) tile.getDataBuffer()).getData(), 0, height * sm.getScanlineStride());

    return bi;
  }
  /*
   * This method averages the pixel values around a central point and returns the
   * average as an instance of Color. The point coordinates are given as fractions
   * of the image width and height.
   */
  private Color averageAround(BufferedImage i, double px, double py) {
    // Get an iterator for the image.
    RandomIter iterator = RandomIterFactory.create(i, null);
    // Get memory for a pixel and for the accumulator.
    double[] pixel = new double[i.getSampleModel().getNumBands()];
    double[] accum = new double[3];
    int numPixels = 0;
    // Sample the pixels.

    for (double x = px * i.getWidth() - sampleSize; x < px * i.getWidth() + sampleSize; x++) {
      for (double y = py * i.getHeight() - sampleSize; y < py * i.getHeight() + sampleSize; y++) {
        iterator.getPixel((int) x, (int) y, pixel);
        accum[0] += pixel[0];
        accum[1] += pixel[1];
        accum[2] += pixel[2];
        numPixels++;
      }
    }
    // Average the accumulated values.
    accum[0] /= numPixels;
    accum[1] /= numPixels;
    accum[2] /= numPixels;
    return new Color((int) accum[0], (int) accum[1], (int) accum[2]);
  }
  public void write(BufferedImage image, AVList params) throws IOException {
    if (image == null) {
      String msg = Logging.getMessage("nullValue.ImageSource");
      Logging.logger().severe(msg);
      throw new IllegalArgumentException(msg);
    }

    if (0 == image.getWidth() || 0 == image.getHeight()) {
      String msg =
          Logging.getMessage("generic.InvalidImageSize", image.getWidth(), image.getHeight());
      Logging.logger().severe(msg);
      throw new IllegalArgumentException(msg);
    }

    if (null == params || 0 == params.getValues().size()) {
      String reason = Logging.getMessage("nullValue.AVListIsNull");
      Logging.logger().finest(Logging.getMessage("GeotiffWriter.GeoKeysMissing", reason));
      params = new AVListImpl();
    } else {
      this.validateParameters(params, image.getWidth(), image.getHeight());
    }

    // how we proceed in part depends upon the image type...
    int type = image.getType();

    // handle the CUSTOM type, which comes from our GeoTIFF reader (for now)
    if (BufferedImage.TYPE_CUSTOM == type) {
      int numColorComponents = 0, numComponents = 0, pixelSize = 0, dataType = 0, csType = 0;
      boolean hasAlpha = false;

      if (null != image.getColorModel()) {
        ColorModel cm = image.getColorModel();

        numColorComponents = cm.getNumColorComponents();
        numComponents = cm.getNumComponents();
        pixelSize = cm.getPixelSize();
        hasAlpha = cm.hasAlpha();

        ColorSpace cs = cm.getColorSpace();
        if (null != cs) csType = cs.getType();
      }

      if (null != image.getSampleModel()) {
        SampleModel sm = image.getSampleModel();
        dataType = sm.getDataType();
      }

      if (dataType == DataBuffer.TYPE_FLOAT && pixelSize == Float.SIZE && numComponents == 1) {
        type = BufferedImage_TYPE_ELEVATION_FLOAT32;
      } else if (dataType == DataBuffer.TYPE_SHORT
          && pixelSize == Short.SIZE
          && numComponents == 1) {
        type = BufferedImage_TYPE_ELEVATION_SHORT16;
      } else if (ColorSpace.CS_GRAY == csType && pixelSize == Byte.SIZE) {
        type = BufferedImage.TYPE_BYTE_GRAY;
      } else if (dataType == DataBuffer.TYPE_USHORT
          && ColorSpace.CS_GRAY == csType
          && pixelSize == Short.SIZE) {
        type = BufferedImage.TYPE_USHORT_GRAY;
      } else if (ColorSpace.TYPE_RGB == csType
          && pixelSize == 3 * Byte.SIZE
          && numColorComponents == 3) {
        type = BufferedImage.TYPE_3BYTE_BGR;
      } else if (ColorSpace.TYPE_RGB == csType
          && hasAlpha
          && pixelSize == 4 * Byte.SIZE
          && numComponents == 4) {
        type = BufferedImage.TYPE_4BYTE_ABGR;
      }
    }

    switch (type) {
      case BufferedImage.TYPE_3BYTE_BGR:
      case BufferedImage.TYPE_4BYTE_ABGR:
      case BufferedImage.TYPE_4BYTE_ABGR_PRE:
      case BufferedImage.TYPE_INT_RGB:
      case BufferedImage.TYPE_INT_BGR:
      case BufferedImage.TYPE_INT_ARGB:
      case BufferedImage.TYPE_INT_ARGB_PRE:
        {
          this.writeColorImage(image, params);
        }
        break;

      case BufferedImage.TYPE_USHORT_GRAY:
      case BufferedImage.TYPE_BYTE_GRAY:
        {
          this.writeGrayscaleImage(image, params);
        }
        break;

      case BufferedImage_TYPE_ELEVATION_SHORT16:
      case BufferedImage_TYPE_ELEVATION_FLOAT32:
        {
          String msg = Logging.getMessage("GeotiffWriter.FeatureNotImplementedd", type);
          Logging.logger().severe(msg);
          throw new IllegalArgumentException(msg);
        }
        //            break;

      case BufferedImage.TYPE_CUSTOM:
      default:
        {
          ColorModel cm = image.getColorModel();
          SampleModel sm = image.getSampleModel();

          StringBuffer sb =
              new StringBuffer(Logging.getMessage("GeotiffWriter.UnsupportedType", type));

          sb.append("\n");
          sb.append("NumBands=").append(sm.getNumBands()).append("\n");
          sb.append("NumDataElements=").append(sm.getNumDataElements()).append("\n");
          sb.append("NumColorComponents=").append(cm.getNumColorComponents()).append("\n");
          sb.append("NumComponents=").append(cm.getNumComponents()).append("\n");
          sb.append("PixelSize=").append(cm.getPixelSize()).append("\n");
          sb.append("hasAlpha=").append(cm.hasAlpha());

          String msg = sb.toString();
          Logging.logger().severe(msg);
          throw new IllegalArgumentException(msg);
        }
    }
  }
  private void writeGrayscaleImage(BufferedImage image, AVList params) throws IOException {
    int type = image.getType();

    int bitsPerSample =
        (BufferedImage.TYPE_USHORT_GRAY == type)
            ? Tiff.BitsPerSample.MONOCHROME_UINT16
            : Tiff.BitsPerSample.MONOCHROME_UINT8;

    int numBands = image.getSampleModel().getNumBands();
    // numBands for grayscale images must be 1

    int bytesPerSample = numBands * bitsPerSample / Byte.SIZE;

    this.writeTiffHeader();

    // write the image data...
    int numRows = image.getHeight();
    int numCols = image.getWidth();
    int[] stripCounts = new int[numRows];
    int[] stripOffsets = new int[numRows];
    ByteBuffer dataBuff = ByteBuffer.allocateDirect(numCols * bytesPerSample);
    Raster rast = image.getRaster();

    for (int i = 0; i < numRows; i++) {
      stripOffsets[i] = (int) this.theChannel.position();
      stripCounts[i] = numCols * bytesPerSample;
      int[] rowData = rast.getPixels(0, i, image.getWidth(), 1, (int[]) null);
      dataBuff.clear();

      if (BufferedImage.TYPE_USHORT_GRAY == type) {
        for (int j = 0; j < numCols * numBands; j++) {
          this.putUnsignedShort(dataBuff, rowData[j]);
        }
      } else if (BufferedImage.TYPE_BYTE_GRAY == type) {
        for (int j = 0; j < numCols * numBands; j++) {
          this.putUnsignedByte(dataBuff, rowData[j]);
        }
      }
      dataBuff.flip();
      this.theChannel.write(dataBuff);
    }

    // Write out values for the TIFF tags and build up the IFD. These are supposed
    // to be sorted; for now, do this manually here.
    ArrayList<TiffIFDEntry> ifds = new ArrayList<TiffIFDEntry>(10);

    ifds.add(new TiffIFDEntry(Tiff.Tag.IMAGE_WIDTH, Tiff.Type.LONG, 1, numCols));
    ifds.add(new TiffIFDEntry(Tiff.Tag.IMAGE_LENGTH, Tiff.Type.LONG, 1, numRows));
    ifds.add(new TiffIFDEntry(Tiff.Tag.BITS_PER_SAMPLE, Tiff.Type.SHORT, 1, bitsPerSample));
    ifds.add(new TiffIFDEntry(Tiff.Tag.COMPRESSION, Tiff.Type.LONG, 1, Tiff.Compression.NONE));
    ifds.add(
        new TiffIFDEntry(
            Tiff.Tag.PHOTO_INTERPRETATION,
            Tiff.Type.SHORT,
            1,
            Tiff.Photometric.Grayscale_BlackIsZero));
    ifds.add(
        new TiffIFDEntry(Tiff.Tag.SAMPLE_FORMAT, Tiff.Type.SHORT, 1, Tiff.SampleFormat.UNSIGNED));

    long offset = this.theChannel.position();
    dataBuff = ByteBuffer.allocateDirect(stripOffsets.length * INTEGER_SIZEOF);
    for (int stripOffset : stripOffsets) {
      dataBuff.putInt(stripOffset);
    }
    dataBuff.flip();
    this.theChannel.write(dataBuff);
    ifds.add(new TiffIFDEntry(Tiff.Tag.STRIP_OFFSETS, Tiff.Type.LONG, stripOffsets.length, offset));

    ifds.add(new TiffIFDEntry(Tiff.Tag.SAMPLES_PER_PIXEL, Tiff.Type.SHORT, 1, numBands));
    ifds.add(new TiffIFDEntry(Tiff.Tag.ROWS_PER_STRIP, Tiff.Type.LONG, 1, 1));

    offset = this.theChannel.position();
    // stripOffsets and stripCounts are the same length by design, so the ByteBuffer can be reused.
    dataBuff.clear();

    for (int stripCount : stripCounts) {
      dataBuff.putInt(stripCount);
    }
    dataBuff.flip();
    this.theChannel.write(dataBuff);
    ifds.add(
        new TiffIFDEntry(Tiff.Tag.STRIP_BYTE_COUNTS, Tiff.Type.LONG, stripCounts.length, offset));

    this.appendGeoTiff(ifds, params);

    this.writeIFDs(ifds);
  }
 /**
  * Decompress the JPEG source image associated with this decompressor instance and output a
  * decompressed image to the given <code>BufferedImage</code> instance.
  *
  * @param dstImage a <code>BufferedImage</code> instance that will receive the decompressed image
  * @param flags the bitwise OR of one or more of {@link TJ TJ.FLAG_*}
  */
 public void decompress(BufferedImage dstImage, int flags) throws Exception {
   if (dstImage == null || flags < 0) throw new Exception("Invalid argument in decompress()");
   int desiredWidth = dstImage.getWidth();
   int desiredHeight = dstImage.getHeight();
   int scaledWidth = getScaledWidth(desiredWidth, desiredHeight);
   int scaledHeight = getScaledHeight(desiredWidth, desiredHeight);
   if (scaledWidth != desiredWidth || scaledHeight != desiredHeight)
     throw new Exception(
         "BufferedImage dimensions do not match a scaled image size that TurboJPEG is capable of generating.");
   int pixelFormat;
   boolean intPixels = false;
   if (byteOrder == null) byteOrder = ByteOrder.nativeOrder();
   switch (dstImage.getType()) {
     case BufferedImage.TYPE_3BYTE_BGR:
       pixelFormat = TJ.PF_BGR;
       break;
     case BufferedImage.TYPE_4BYTE_ABGR:
     case BufferedImage.TYPE_4BYTE_ABGR_PRE:
       pixelFormat = TJ.PF_XBGR;
       break;
     case BufferedImage.TYPE_BYTE_GRAY:
       pixelFormat = TJ.PF_GRAY;
       break;
     case BufferedImage.TYPE_INT_BGR:
       if (byteOrder == ByteOrder.BIG_ENDIAN) pixelFormat = TJ.PF_XBGR;
       else pixelFormat = TJ.PF_RGBX;
       intPixels = true;
       break;
     case BufferedImage.TYPE_INT_RGB:
       if (byteOrder == ByteOrder.BIG_ENDIAN) pixelFormat = TJ.PF_XRGB;
       else pixelFormat = TJ.PF_BGRX;
       intPixels = true;
       break;
     case BufferedImage.TYPE_INT_ARGB:
     case BufferedImage.TYPE_INT_ARGB_PRE:
       if (byteOrder == ByteOrder.BIG_ENDIAN) pixelFormat = TJ.PF_ARGB;
       else pixelFormat = TJ.PF_BGRA;
       intPixels = true;
       break;
     default:
       throw new Exception("Unsupported BufferedImage format");
   }
   WritableRaster wr = dstImage.getRaster();
   if (intPixels) {
     SinglePixelPackedSampleModel sm = (SinglePixelPackedSampleModel) dstImage.getSampleModel();
     int stride = sm.getScanlineStride();
     DataBufferInt db = (DataBufferInt) wr.getDataBuffer();
     int[] buf = db.getData();
     if (jpegBuf == null) throw new Exception(NO_ASSOC_ERROR);
     decompress(jpegBuf, jpegBufSize, buf, scaledWidth, stride, scaledHeight, pixelFormat, flags);
   } else {
     ComponentSampleModel sm = (ComponentSampleModel) dstImage.getSampleModel();
     int pixelSize = sm.getPixelStride();
     if (pixelSize != TJ.getPixelSize(pixelFormat))
       throw new Exception("Inconsistency between pixel format and pixel size in BufferedImage");
     int pitch = sm.getScanlineStride();
     DataBufferByte db = (DataBufferByte) wr.getDataBuffer();
     byte[] buf = db.getData();
     decompress(buf, scaledWidth, pitch, scaledHeight, pixelFormat, flags);
   }
 }
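A hypothetical caller sketch for the method above: the destination image must match one of the scaled sizes TurboJPEG can produce, which the getScaledWidth/getScaledHeight accessors report. Here "decompressor" stands in for the enclosing decompressor object, and passing 0 for the desired dimensions is assumed to mean the full JPEG size:

  int w = decompressor.getScaledWidth(0, 0);  // assumption: 0, 0 requests the unscaled size
  int h = decompressor.getScaledHeight(0, 0);
  BufferedImage dst = new BufferedImage(w, h, BufferedImage.TYPE_INT_RGB);
  decompressor.decompress(dst, 0);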
  /**
   * Binds the BufferedImage byte stream into video memory. The BufferedImage must be in
   * TYPE_4BYTE_ABGR, which avoids endianness problems.
   */
  public Texture bind(final BufferedImage _image, final InternalFormat _format) {
    final GL3 gl = GLRenderer.getCanvas().getContext().getCurrentGL().getGL3();
    if (gl == null) {
      System.out.println("GL context doesn't exist");
      return null;
    }

    gl.glEnable(GL.GL_TEXTURE_2D);

    final int textureID = glGenTextures(gl);
    gl.glBindTexture(GL3.GL_TEXTURE_2D, textureID);

    gl.glTexParameteri(GL3.GL_TEXTURE_2D, GL3.GL_TEXTURE_WRAP_S, GL3.GL_REPEAT);
    gl.glTexParameteri(GL3.GL_TEXTURE_2D, GL3.GL_TEXTURE_WRAP_T, GL3.GL_REPEAT);
    gl.glTexParameteri(GL3.GL_TEXTURE_2D, GL3.GL_TEXTURE_MAG_FILTER, GL3.GL_LINEAR);
    gl.glTexParameteri(GL3.GL_TEXTURE_2D, GL3.GL_TEXTURE_MIN_FILTER, GL3.GL_LINEAR_MIPMAP_LINEAR);

    final int width = _image.getWidth();
    final int height = _image.getHeight();
    final int channels = _image.getSampleModel().getNumBands();
    int internalFormat = GL3.GL_RGB;

    if (gl.isExtensionAvailable("GL_EXT_abgr")) {
      switch (channels) {
        case 4:
          imageFormat = GL2.GL_ABGR_EXT;
          break;
        case 3:
          imageFormat = GL3.GL_BGR;
          break;
        case 1:
          imageFormat = GL3.GL_RED;
          break;
      }
    } else {
      switch (channels) {
        case 4:
          imageFormat = GL3.GL_RGBA;
          break;
        case 3:
          imageFormat = GL3.GL_RGB;
          break;
        case 1:
          imageFormat = GL3.GL_RED;
          break;
      }
    }

    gl.glPixelStorei(GL3.GL_UNPACK_ALIGNMENT, 1);
    gl.glTexImage2D(
        GL3.GL_TEXTURE_2D,
        0,
        getGLInternalFormat(channels, _format),
        width,
        height,
        0,
        imageFormat,
        GL3.GL_UNSIGNED_BYTE,
        getByteBuffer(_image));

    gl.glGenerateMipmap(GL3.GL_TEXTURE_2D);
    gl.glBindTexture(GL.GL_TEXTURE_2D, 0); // Reset to default texture

    return new Texture(new GLImage(textureID, width, height));
  }
Example #19
  public BufferedImage read(int imageIndex, ImageReadParam param) throws IOException {
    checkIndex(imageIndex);
    readHeader();

    if (iis == null) throw new IllegalStateException("input is null");

    BufferedImage img;

    clearAbortRequest();
    processImageStarted(imageIndex);

    if (param == null) param = getDefaultReadParam();

    sourceRegion = new Rectangle(0, 0, 0, 0);
    destinationRegion = new Rectangle(0, 0, 0, 0);

    computeRegions(
        param, this.width, this.height, param.getDestination(), sourceRegion, destinationRegion);

    scaleX = param.getSourceXSubsampling();
    scaleY = param.getSourceYSubsampling();

    // If the destination bands are set, use them
    sourceBands = param.getSourceBands();
    destBands = param.getDestinationBands();

    seleBand = (sourceBands != null) && (destBands != null);
    noTransform = destinationRegion.equals(new Rectangle(0, 0, width, height)) || seleBand;

    if (!seleBand) {
      sourceBands = new int[colorPlanes];
      destBands = new int[colorPlanes];
      for (int i = 0; i < colorPlanes; i++) destBands[i] = sourceBands[i] = i;
    }

    // If the destination is provided, then use it. Otherwise, create a new one.
    bi = param.getDestination();

    // Get the image data.
    WritableRaster raster = null;

    if (bi == null) {
      if (sampleModel != null && colorModel != null) {
        sampleModel =
            sampleModel.createCompatibleSampleModel(
                destinationRegion.width + destinationRegion.x,
                destinationRegion.height + destinationRegion.y);
        if (seleBand) sampleModel = sampleModel.createSubsetSampleModel(sourceBands);
        raster = Raster.createWritableRaster(sampleModel, new Point(0, 0));
        bi = new BufferedImage(colorModel, raster, false, null);
      }
    } else {
      raster = bi.getWritableTile(0, 0);
      sampleModel = bi.getSampleModel();
      colorModel = bi.getColorModel();

      noTransform &= destinationRegion.equals(raster.getBounds());
    }

    byte[] bdata = null; // buffer for byte data

    if (sampleModel.getDataType() == DataBuffer.TYPE_BYTE)
      bdata = (byte[]) ((DataBufferByte) raster.getDataBuffer()).getData();

    readImage(bdata);

    if (abortRequested()) processReadAborted();
    else processImageComplete();

    return bi;
  }