// Benchmark body: repeatedly read 4-byte unsigned ints, rewinding to the
// marked stream position whenever fewer than four bytes remain before 'length'.
public void runTest(Object ctx, int numReps) {
   final Context ictx = (Context) ctx;
   final ImageInputStream iis = ictx.inputStream;
   final int length = ictx.length;
   int pos = 0;
   try {
     iis.mark();
     do {
       if (pos + 4 > length) {
         iis.reset();
         iis.mark();
         pos = 0;
       }
       iis.readUnsignedInt();
       pos += 4;
     } while (--numReps >= 0);
   } catch (IOException e) {
     e.printStackTrace();
   } finally {
     try {
       iis.reset();
     } catch (IOException e) {
     }
   }
 }
  // Benchmark body: repeatedly skip one scanline, rewinding to the marked
  // stream position whenever a full scanline no longer fits before 'length'.
  public void runTest(Object ctx, int numReps) {
   final Context ictx = (Context) ctx;
   final ImageInputStream iis = ictx.inputStream;
   final int scanlineStride = ictx.scanlineStride;
   final int length = ictx.length;
   int pos = 0;
   try {
     iis.mark();
     do {
       if (pos + scanlineStride > length) {
         iis.reset();
         iis.mark();
         pos = 0;
       }
       iis.skipBytes(scanlineStride);
       pos += scanlineStride;
     } while (--numReps >= 0);
   } catch (IOException e) {
     e.printStackTrace();
   } finally {
     try {
       iis.reset();
     } catch (IOException e) {
     }
   }
 }
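Both runTest variants above assume a caller-supplied Context object that bundles the ImageInputStream with the byte counts they loop over; the snippets do not show that holder. A minimal sketch of what it might look like, using only the field names the benchmarks actually read (inputStream, length, scanlineStride); everything else here is assumed:

import javax.imageio.stream.ImageInputStream;

// Hypothetical holder matching the fields the two benchmarks dereference.
// Not part of the original source; shown only to make the snippets self-contained.
final class Context {
  final ImageInputStream inputStream;
  final int length;         // total number of bytes that may be read before wrapping
  final int scanlineStride; // bytes per scanline, used by the skipBytes() variant

  Context(ImageInputStream inputStream, int length, int scanlineStride) {
    this.inputStream = inputStream;
    this.length = length;
    this.scanlineStride = scanlineStride;
  }
}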
  public boolean canDecodeInput(Object input) throws IOException {

    // The input source must be an ImageInputStream because the constructor
    // passes STANDARD_INPUT_TYPE (an array containing only ImageInputStream)
    // to its superclass as the only kind of input source this plug-in handles.

    if (!(input instanceof ImageInputStream)) return false;

    ImageInputStream stream = (ImageInputStream) input;

    // Read and validate the input source's header.
    byte[] header = new byte[8];
    try {
      // The input source's current position must be preserved so that
      // other ImageReaderSpis can determine if they can decode the input
      // source's format, should this input source be unable to handle the
      // decoding. Because the input source is an ImageInputStream, its
      // mark() and reset() methods are called to preserve the current
      // position.

      stream.mark();
      stream.read(header);
      stream.reset();
    } catch (IOException e) {
      return false;
    }

    byte[] controlHeader = new byte[] {(byte) 151, 74, 66, 50, 13, 10, 26, 10};

    return Arrays.equals(controlHeader, header);
  }
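The reason canDecodeInput must restore the stream position is that ImageIO probes every registered ImageReaderSpi against the same stream when looking up a reader. A hedged sketch of how that lookup is typically driven from client code (the file name is a placeholder):

import java.io.File;
import java.io.IOException;
import java.util.Iterator;
import javax.imageio.ImageIO;
import javax.imageio.ImageReader;
import javax.imageio.stream.ImageInputStream;

public class ReaderProbeExample {
  public static void main(String[] args) throws IOException {
    // Placeholder path; substitute a real image file.
    try (ImageInputStream iis = ImageIO.createImageInputStream(new File("input.jb2"))) {
      if (iis == null) {
        System.out.println("No ImageInputStream SPI could handle the source.");
        return;
      }
      // ImageIO calls canDecodeInput() on each registered SPI. Because each SPI
      // brackets its header read with mark()/reset(), the probes leave the
      // stream position untouched for the next candidate.
      Iterator<ImageReader> readers = ImageIO.getImageReaders(iis);
      while (readers.hasNext()) {
        System.out.println("Candidate reader format: " + readers.next().getFormatName());
      }
    }
  }
}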
Example #4
  public boolean canDecodeInput(Object source) throws IOException {
    if (!(source instanceof ImageInputStream)) {
      return false;
    }

    ImageInputStream stream = (ImageInputStream) source;

    stream.mark();
    int type = stream.readByte(); // TypeField
    byte fixHeaderField = stream.readByte();

    int width = ImageUtil.readMultiByteInteger(stream);
    int height = ImageUtil.readMultiByteInteger(stream);

    long remainingBytes = stream.length() - stream.getStreamPosition();
    stream.reset();

    // Check the WBMP "header": only type 0 with a zero FixHeaderField is
    // accepted, since this WBMP reader does not support extended WBMP headers.
    if (type != 0 || fixHeaderField != 0) {
      return false;
    }

    // check image dimension
    if (width <= 0 || height <= 0) {
      return false;
    }

    long scanSize = (width / 8) + ((width % 8) == 0 ? 0 : 1);

    return (remainingBytes == scanSize * height);
  }
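ImageUtil.readMultiByteInteger above decodes WBMP's variable-length integers: each octet carries 7 payload bits, and the high bit flags that more octets follow. A stand-alone sketch of that decoding for readers without the ImageUtil class on the classpath (a sketch, not the library's implementation):

import java.io.IOException;
import javax.imageio.stream.ImageInputStream;

final class WbmpIntegers {
  // Decodes a WBMP multi-byte integer: big-endian groups of 7 bits,
  // with the high bit of each octet set while more octets follow.
  static int readMultiByteInteger(ImageInputStream stream) throws IOException {
    int value = 0;
    int octet;
    do {
      octet = stream.readUnsignedByte();
      value = (value << 7) | (octet & 0x7f);
    } while ((octet & 0x80) != 0);
    return value;
  }
}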
  /**
   * Returns the geotiff metadata for this geotiff file.
   *
   * @return the metadata
   */
  public GeoTiffIIOMetadataDecoder getMetadata() {
    GeoTiffIIOMetadataDecoder metadata = null;
    ImageReader reader = null;
    boolean closeMe = true;
    ImageInputStream stream = null;

    try {
      if ((source instanceof InputStream) || (source instanceof ImageInputStream)) {
        closeMe = false;
      }
      if (source instanceof ImageInputStream) {
        stream = (ImageInputStream) source;
      } else {
        inStreamSPI = ImageIOExt.getImageInputStreamSPI(source);
        if (inStreamSPI == null) {
          throw new IllegalArgumentException("No input stream for the provided source");
        }
        stream =
            inStreamSPI.createInputStreamInstance(
                source, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
      }
      if (stream == null) {
        throw new IllegalArgumentException("No input stream for the provided source");
      }
      stream.mark();
      reader = READER_SPI.createReaderInstance();
      reader.setInput(stream);
      final IIOMetadata iioMetadata = reader.getImageMetadata(0);
      metadata = new GeoTiffIIOMetadataDecoder(iioMetadata);
    } catch (IOException e) {
      if (LOGGER.isLoggable(Level.SEVERE)) {
        LOGGER.log(Level.SEVERE, e.getMessage(), e);
      }
    } finally {
      if (reader != null)
        try {
          reader.dispose();
        } catch (Throwable t) {
        }

      if (stream != null) {
        try {
          stream.reset();
        } catch (Throwable t) {
        }
        if (closeMe) {
          try {
            stream.close();
          } catch (Throwable t) {
          }
        }
      }
    }
    return metadata;
  }
Example #6
 BufferedImage getThumbnail(ImageInputStream iis, JPEGImageReader reader) throws IOException {
   iis.mark();
   iis.seek(streamPos);
   JPEGImageReader thumbReader = new JPEGImageReader(null);
   thumbReader.setInput(iis);
   thumbReader.addIIOReadProgressListener(new ThumbnailReadListener(reader));
   BufferedImage ret = thumbReader.read(0, null);
   thumbReader.dispose();
   iis.reset();
   return ret;
 }
  public boolean canDecodeInput(Object source) throws IOException {
    if (!(source instanceof ImageInputStream)) {
      return false;
    }

    ImageInputStream stream = (ImageInputStream) source;
    byte[] b = new byte[2];
    stream.mark();
    stream.readFully(b);
    stream.reset();

    return (b[0] == 0x42) && (b[1] == 0x4d);
  }
Example #8
    BufferedImage getThumbnail(ImageInputStream iis, JPEGImageReader reader) throws IOException {
      iis.mark();
      iis.seek(streamPos);
      DataBufferByte buffer = new DataBufferByte(getLength());
      readByteBuffer(iis, buffer.getData(), reader, 1.0F, 0.0F);
      iis.reset();

      WritableRaster raster =
          Raster.createInterleavedRaster(
              buffer, thumbWidth, thumbHeight, thumbWidth * 3, 3, new int[] {0, 1, 2}, null);
      ColorModel cm =
          new ComponentColorModel(
              JPEG.JCS.sRGB, false, false, ColorModel.OPAQUE, DataBuffer.TYPE_BYTE);
      return new BufferedImage(cm, raster, false, null);
    }
Example #9
    BufferedImage getThumbnail(ImageInputStream iis, JPEGImageReader reader) throws IOException {
      iis.mark();
      iis.seek(streamPos);
      // read the palette
      byte[] palette = new byte[PALETTE_SIZE];
      float palettePart = ((float) PALETTE_SIZE) / getLength();
      readByteBuffer(iis, palette, reader, palettePart, 0.0F);
      DataBufferByte buffer = new DataBufferByte(thumbWidth * thumbHeight);
      readByteBuffer(iis, buffer.getData(), reader, 1.0F - palettePart, palettePart);
      iis.read();
      iis.reset();

      IndexColorModel cm = new IndexColorModel(8, 256, palette, 0, false);
      SampleModel sm = cm.createCompatibleSampleModel(thumbWidth, thumbHeight);
      WritableRaster raster = Raster.createWritableRaster(sm, buffer, null);
      return new BufferedImage(cm, raster, false, null);
    }
  public boolean canDecodeInput(Object input) throws IOException {
    if (!(input instanceof ImageInputStream)) {
      return false;
    }

    ImageInputStream stream = (ImageInputStream) input;
    byte[] b = new byte[4];
    stream.mark();
    stream.readFully(b);
    stream.reset();

    return ((b[0] == (byte) 0x49
            && b[1] == (byte) 0x49
            && b[2] == (byte) 0x2a
            && b[3] == (byte) 0x00)
        || (b[0] == (byte) 0x4d
            && b[1] == (byte) 0x4d
            && b[2] == (byte) 0x00
            && b[3] == (byte) 0x2a));
  }
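The four-byte signature checked above is either "II*\0" (little-endian) or "MM\0*" (big-endian). After such a check, a TIFF reader typically sets the stream's byte order from the first two bytes and reads the offset of the first IFD, which the initialize method in Example #11 below then parses. A minimal sketch, assuming the stream is positioned at the start of the file and that only classic TIFF (not BigTIFF) is of interest:

import java.io.IOException;
import java.nio.ByteOrder;
import javax.imageio.stream.ImageInputStream;

final class TiffHeader {
  // Reads the byte-order marker and the classic-TIFF magic number, then
  // returns the offset of the first IFD. BigTIFF (magic 43) is not handled.
  static long readFirstIFDOffset(ImageInputStream stream) throws IOException {
    int b0 = stream.read();
    int b1 = stream.read();
    if (b0 == 0x49 && b1 == 0x49) {        // "II" -> little-endian
      stream.setByteOrder(ByteOrder.LITTLE_ENDIAN);
    } else if (b0 == 0x4d && b1 == 0x4d) { // "MM" -> big-endian
      stream.setByteOrder(ByteOrder.BIG_ENDIAN);
    } else {
      throw new IOException("Not a TIFF stream");
    }
    int magic = stream.readUnsignedShort(); // 42 for classic TIFF
    if (magic != 42) {
      throw new IOException("Unexpected TIFF magic number: " + magic);
    }
    return stream.readUnsignedInt();        // offset of the first IFD
  }
}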
Example #11
  public void initialize(
      ImageInputStream stream, boolean ignoreUnknownFields, final boolean isBTIFF)
      throws IOException {
    removeTIFFFields();

    List tagSetList = getTagSetList();

    final long numEntries;
    if (isBTIFF) numEntries = stream.readLong();
    else numEntries = stream.readUnsignedShort();

    for (int i = 0; i < numEntries; i++) {
      // Read tag number, value type, and value count.
      int tag = stream.readUnsignedShort();
      int type = stream.readUnsignedShort();
      int count;
      if (isBTIFF) {
        long count_ = stream.readLong();
        count = (int) count_;
        if (count != count_)
          throw new IllegalArgumentException("unable to use long number of values");
      } else count = (int) stream.readUnsignedInt();

      // Get the associated TIFFTag.
      TIFFTag tiffTag = getTag(tag, tagSetList);

      // Ignore unknown fields.
      if (ignoreUnknownFields && tiffTag == null) {
        // Skip the value/offset so as to leave the stream
        // position at the start of the next IFD entry.

        if (isBTIFF) stream.skipBytes(8);
        else stream.skipBytes(4);

        // XXX Warning message ...

        // Continue with the next IFD entry.
        continue;
      }

      long nextTagOffset;

      if (isBTIFF) {
        nextTagOffset = stream.getStreamPosition() + 8;
        int sizeOfType = TIFFTag.getSizeOfType(type);
        if (count * sizeOfType > 8) {
          long value = stream.readLong();
          stream.seek(value);
        }
      } else {
        nextTagOffset = stream.getStreamPosition() + 4;
        int sizeOfType = TIFFTag.getSizeOfType(type);
        if (count * sizeOfType > 4) {
          long value = stream.readUnsignedInt();
          stream.seek(value);
        }
      }

      if (tag == BaselineTIFFTagSet.TAG_STRIP_BYTE_COUNTS
          || tag == BaselineTIFFTagSet.TAG_TILE_BYTE_COUNTS
          || tag == BaselineTIFFTagSet.TAG_JPEG_INTERCHANGE_FORMAT_LENGTH) {
        this.stripOrTileByteCountsPosition = stream.getStreamPosition();
        if (LAZY_LOADING) {
          type = type == TIFFTag.TIFF_LONG ? TIFFTag.TIFF_LAZY_LONG : TIFFTag.TIFF_LAZY_LONG8;
        }
      } else if (tag == BaselineTIFFTagSet.TAG_STRIP_OFFSETS
          || tag == BaselineTIFFTagSet.TAG_TILE_OFFSETS
          || tag == BaselineTIFFTagSet.TAG_JPEG_INTERCHANGE_FORMAT) {
        this.stripOrTileOffsetsPosition = stream.getStreamPosition();
        if (LAZY_LOADING) {
          type = type == TIFFTag.TIFF_LONG ? TIFFTag.TIFF_LAZY_LONG : TIFFTag.TIFF_LAZY_LONG8;
        }
      }

      Object obj = null;

      try {
        switch (type) {
          case TIFFTag.TIFF_BYTE:
          case TIFFTag.TIFF_SBYTE:
          case TIFFTag.TIFF_UNDEFINED:
          case TIFFTag.TIFF_ASCII:
            byte[] bvalues = new byte[count];
            stream.readFully(bvalues, 0, count);

            if (type == TIFFTag.TIFF_ASCII) {
              // Can be multiple strings
              final List<String> v = new ArrayList<String>();
              boolean inString = false;
              int prevIndex = 0;
              for (int index = 0; index <= count; index++) {
                if (index < count && bvalues[index] != 0) {
                  if (!inString) {
                    // start of string
                    prevIndex = index;
                    inString = true;
                  }
                } else { // null or special case at end of string
                  if (inString) {
                    // end of string
                    final String s = new String(bvalues, prevIndex, index - prevIndex);
                    v.add(s);
                    inString = false;
                  }
                }
              }

              count = v.size();
              String[] strings;
              if (count != 0) {
                strings = new String[count];
                for (int c = 0; c < count; c++) {
                  strings[c] = v.get(c);
                }
              } else {
                // This case has been observed when the value of
                // 'count' recorded in the field is non-zero but
                // the value portion contains all nulls.
                count = 1;
                strings = new String[] {""};
              }

              obj = strings;
            } else {
              obj = bvalues;
            }
            break;

          case TIFFTag.TIFF_SHORT:
            char[] cvalues = new char[count];
            for (int j = 0; j < count; j++) {
              cvalues[j] = (char) (stream.readUnsignedShort());
            }
            obj = cvalues;
            break;

          case TIFFTag.TIFF_LONG:
          case TIFFTag.TIFF_IFD_POINTER:
            long[] lvalues = new long[count];
            for (int j = 0; j < count; j++) {
              lvalues[j] = stream.readUnsignedInt();
            }
            obj = lvalues;
            break;

          case TIFFTag.TIFF_RATIONAL:
            long[][] llvalues = new long[count][2];
            for (int j = 0; j < count; j++) {
              llvalues[j][0] = stream.readUnsignedInt();
              llvalues[j][1] = stream.readUnsignedInt();
            }
            obj = llvalues;
            break;

          case TIFFTag.TIFF_SSHORT:
            short[] svalues = new short[count];
            for (int j = 0; j < count; j++) {
              svalues[j] = stream.readShort();
            }
            obj = svalues;
            break;

          case TIFFTag.TIFF_SLONG:
            int[] ivalues = new int[count];
            for (int j = 0; j < count; j++) {
              ivalues[j] = stream.readInt();
            }
            obj = ivalues;
            break;

          case TIFFTag.TIFF_SRATIONAL:
            int[][] iivalues = new int[count][2];
            for (int j = 0; j < count; j++) {
              iivalues[j][0] = stream.readInt();
              iivalues[j][1] = stream.readInt();
            }
            obj = iivalues;
            break;

          case TIFFTag.TIFF_FLOAT:
            float[] fvalues = new float[count];
            for (int j = 0; j < count; j++) {
              fvalues[j] = stream.readFloat();
            }
            obj = fvalues;
            break;

          case TIFFTag.TIFF_DOUBLE:
            double[] dvalues = new double[count];
            for (int j = 0; j < count; j++) {
              dvalues[j] = stream.readDouble();
            }
            obj = dvalues;
            break;

          case TIFFTag.TIFF_LONG8:
          case TIFFTag.TIFF_SLONG8:
          case TIFFTag.TIFF_IFD8:
            long[] lBvalues = new long[count];
            for (int j = 0; j < count; j++) {
              lBvalues[j] = stream.readLong();
            }
            obj = lBvalues;
            break;

          case TIFFTag.TIFF_LAZY_LONG8:
          case TIFFTag.TIFF_LAZY_LONG:
            obj = new TIFFLazyData(stream, type, count);
            break;
          default:
            // XXX Warning
            break;
        }
      } catch (EOFException eofe) {
        // The TIFF 6.0 fields have tag numbers less than or equal
        // to 532 (ReferenceBlackWhite) or equal to 33432 (Copyright).
        // If there is an error reading a baseline tag, then re-throw
        // the exception and fail; otherwise continue with the next
        // field.
        if (BaselineTIFFTagSet.getInstance().getTag(tag) == null) {
          throw eofe;
        }
      }

      if (tiffTag == null) {
        // XXX Warning: unknown tag
      } else if (!tiffTag.isDataTypeOK(type)) {
        // XXX Warning: bad data type
      } else if (tiffTag.isIFDPointer() && obj != null) {
        stream.mark();
        stream.seek(((long[]) obj)[0]);

        List tagSets = new ArrayList(1);
        tagSets.add(tiffTag.getTagSet());
        TIFFIFD subIFD = new TIFFIFD(tagSets);

        // XXX Use same ignore policy for sub-IFD fields?
        subIFD.initialize(stream, ignoreUnknownFields);
        obj = subIFD;
        stream.reset();
      }

      if (tiffTag == null) {
        tiffTag = new TIFFTag(null, tag, 1 << type, null);
      }

      // Add the field if its contents have been initialized which
      // will not be the case if an EOF was ignored above.
      if (obj != null) {
        TIFFField f = new TIFFField(tiffTag, type, count, obj);
        addTIFFField(f);
      }

      stream.seek(nextTagOffset);
    }

    this.lastPosition = stream.getStreamPosition();
  }
Example #12
  private void readHeader() throws IOException {
    if (gotHeader) {
      iis.seek(128);
      return;
    }

    metadata = new PCXMetadata();

    manufacturer = iis.readByte(); // manufacturer
    if (manufacturer != MANUFACTURER) throw new IllegalStateException("image is not a PCX file");
    metadata.version = iis.readByte(); // version
    encoding = iis.readByte(); // encoding
    if (encoding != ENCODING)
      throw new IllegalStateException("image is not a PCX file, invalid encoding " + encoding);

    metadata.bitsPerPixel = iis.readByte();

    metadata.xmin = iis.readShort();
    metadata.ymin = iis.readShort();
    xmax = iis.readShort();
    ymax = iis.readShort();

    metadata.hdpi = iis.readShort();
    metadata.vdpi = iis.readShort();

    iis.readFully(smallPalette);

    iis.readByte(); // reserved

    colorPlanes = iis.readByte();
    bytesPerLine = iis.readShort();
    paletteType = iis.readShort();

    metadata.hsize = iis.readShort();
    metadata.vsize = iis.readShort();

    iis.skipBytes(54); // skip filler

    width = xmax - metadata.xmin + 1;
    height = ymax - metadata.ymin + 1;

    if (colorPlanes == 1) {
      if (paletteType == PALETTE_GRAYSCALE) {
        ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_GRAY);
        int[] nBits = {8};
        colorModel =
            new ComponentColorModel(
                cs, nBits, false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
        sampleModel =
            new ComponentSampleModel(DataBuffer.TYPE_BYTE, width, height, 1, width, new int[] {0});
      } else {
        if (metadata.bitsPerPixel == 8) {
          // read palette from end of file, then reset back to image data
          iis.mark();

          if (iis.length() == -1) {
            // read until eof, and work backwards
            while (iis.read() != -1) ;
            iis.seek(iis.getStreamPosition() - 256 * 3 - 1);
          } else {
            iis.seek(iis.length() - 256 * 3 - 1);
          }

          int paletteMagic = iis.read();
          if (paletteMagic != 12)
            processWarningOccurred(
                "Expected palette magic number 12; instead read "
                    + paletteMagic
                    + " from this image.");

          iis.readFully(largePalette);
          iis.reset();

          colorModel = new IndexColorModel(metadata.bitsPerPixel, 256, largePalette, 0, false);
          sampleModel = colorModel.createCompatibleSampleModel(width, height);
        } else {
          int msize = metadata.bitsPerPixel == 1 ? 2 : 16;
          colorModel = new IndexColorModel(metadata.bitsPerPixel, msize, smallPalette, 0, false);
          sampleModel = colorModel.createCompatibleSampleModel(width, height);
        }
      }
    } else {
      ColorSpace cs = ColorSpace.getInstance(ColorSpace.CS_sRGB);
      int[] nBits = {8, 8, 8};
      colorModel =
          new ComponentColorModel(
              cs, nBits, false, false, Transparency.OPAQUE, DataBuffer.TYPE_BYTE);
      sampleModel =
          new ComponentSampleModel(
              DataBuffer.TYPE_BYTE,
              width,
              height,
              1,
              width * colorPlanes,
              new int[] {0, width, width * 2});
    }

    originalSampleModel = sampleModel;
    originalColorModel = colorModel;

    gotHeader = true;
  }
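The palette read above is a typical "detour" use of mark()/reset(): remember the current position, seek somewhere else, read, then restore the position for the caller. A generic helper capturing that pattern (a sketch; the callback interface is assumed and is not part of javax.imageio):

import java.io.IOException;
import javax.imageio.stream.ImageInputStream;

final class StreamDetour {
  // Hypothetical callback type used only for this sketch.
  interface StreamReadAction<T> {
    T read(ImageInputStream stream) throws IOException;
  }

  // Runs the action at the given absolute position and restores the caller's
  // position afterwards, mirroring the PCX palette read above.
  static <T> T readAt(ImageInputStream stream, long position, StreamReadAction<T> action)
      throws IOException {
    stream.mark();
    try {
      stream.seek(position);
      return action.read(stream);
    } finally {
      stream.reset();
    }
  }
}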
 public synchronized void reset() throws IOException {
   src.reset();
 }
Example #14
  private void decodeTile(int ti, int tj, int band) throws IOException {
    if (DEBUG) {
      System.out.println("decodeTile(" + ti + "," + tj + "," + band + ")");
    }

    // Compute the region covered by the strip or tile
    Rectangle tileRect =
        new Rectangle(
            ti * tileOrStripWidth, tj * tileOrStripHeight, tileOrStripWidth, tileOrStripHeight);

    // Clip against the image bounds if the image is not tiled. If it
    // is tiled, the tile may legally extend beyond the image bounds.
    if (!isImageTiled(currIndex)) {
      tileRect = tileRect.intersection(new Rectangle(0, 0, width, height));
    }

    // Return if the intersection is empty.
    if (tileRect.width <= 0 || tileRect.height <= 0) {
      return;
    }

    int srcMinX = tileRect.x;
    int srcMinY = tileRect.y;
    int srcWidth = tileRect.width;
    int srcHeight = tileRect.height;

    // Determine dest region that can be derived from the
    // source region

    dstMinX = iceil(srcMinX - sourceXOffset, srcXSubsampling);
    int dstMaxX = ifloor(srcMinX + srcWidth - 1 - sourceXOffset, srcXSubsampling);

    dstMinY = iceil(srcMinY - sourceYOffset, srcYSubsampling);
    int dstMaxY = ifloor(srcMinY + srcHeight - 1 - sourceYOffset, srcYSubsampling);

    dstWidth = dstMaxX - dstMinX + 1;
    dstHeight = dstMaxY - dstMinY + 1;

    dstMinX += dstXOffset;
    dstMinY += dstYOffset;

    // Clip against image bounds

    Rectangle dstRect =
        new Rectangle(
            dstMinX, dstMinY,
            dstWidth, dstHeight);
    dstRect = dstRect.intersection(theImage.getRaster().getBounds());

    dstMinX = dstRect.x;
    dstMinY = dstRect.y;
    dstWidth = dstRect.width;
    dstHeight = dstRect.height;

    if (dstWidth <= 0 || dstHeight <= 0) {
      return;
    }

    // Backwards map dest region to source to determine
    // active source region

    int activeSrcMinX = (dstMinX - dstXOffset) * srcXSubsampling + sourceXOffset;
    int sxmax = (dstMinX + dstWidth - 1 - dstXOffset) * srcXSubsampling + sourceXOffset;
    int activeSrcWidth = sxmax - activeSrcMinX + 1;

    int activeSrcMinY = (dstMinY - dstYOffset) * srcYSubsampling + sourceYOffset;
    int symax = (dstMinY + dstHeight - 1 - dstYOffset) * srcYSubsampling + sourceYOffset;
    int activeSrcHeight = symax - activeSrcMinY + 1;

    decompressor.setSrcMinX(srcMinX);
    decompressor.setSrcMinY(srcMinY);
    decompressor.setSrcWidth(srcWidth);
    decompressor.setSrcHeight(srcHeight);

    decompressor.setDstMinX(dstMinX);
    decompressor.setDstMinY(dstMinY);
    decompressor.setDstWidth(dstWidth);
    decompressor.setDstHeight(dstHeight);

    decompressor.setActiveSrcMinX(activeSrcMinX);
    decompressor.setActiveSrcMinY(activeSrcMinY);
    decompressor.setActiveSrcWidth(activeSrcWidth);
    decompressor.setActiveSrcHeight(activeSrcHeight);

    int tileIndex = tj * tilesAcross + ti;

    if (planarConfiguration == BaselineTIFFTagSet.PLANAR_CONFIGURATION_PLANAR) {
      tileIndex += band * tilesAcross * tilesDown;
    }

    long offset = getTileOrStripOffset(tileIndex);
    long byteCount = getTileOrStripByteCount(tileIndex);

    //
    // Attempt to handle truncated streams, i.e., where reading the
    // compressed strip or tile would result in an EOFException. The
    // number of bytes to read is clamped to the number available
    // from the stream starting at the indicated position in the hope
    // that the decompressor will handle it.
    //
    long streamLength = stream.length();
    if (streamLength > 0 && offset + byteCount > streamLength) {
      processWarningOccurred("Attempting to process truncated stream.");
      byteCount = Math.max(streamLength - offset, 0);
      if (byteCount == 0) {
        processWarningOccurred("No bytes in strip/tile: skipping.");
        return;
      }
    }

    decompressor.setStream(stream);
    decompressor.setOffset(offset);
    decompressor.setByteCount((int) byteCount);

    decompressor.beginDecoding();

    stream.mark();
    decompressor.decode();
    stream.reset();
  }
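The iceil and ifloor helpers used above are not shown in the snippet; they are integer ceiling and floor divisions used when mapping source coordinates to subsampled destination coordinates. A hedged sketch of what such helpers typically look like for a positive divisor (the actual reader's implementation may differ):

final class SubsampleMath {
  // Ceiling of a/b for a positive divisor b; Java's '/' truncates toward zero,
  // which already equals the ceiling when a is negative.
  static int iceil(int a, int b) {
    return (a >= 0) ? (a + b - 1) / b : a / b;
  }

  // Floor of a/b for a positive divisor b; truncation toward zero already
  // equals the floor when a is non-negative.
  static int ifloor(int a, int b) {
    return (a >= 0) ? a / b : (a - b + 1) / b;
  }
}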
Example #15
  /**
   * Initializes these instance variables from the image metadata:
   *
   * <pre>
   * compression
   * width
   * height
   * samplesPerPixel
   * numBands
   * colorMap
   * photometricInterpretation
   * sampleFormat
   * bitsPerSample
   * extraSamples
   * tileOrStripWidth
   * tileOrStripHeight
   * </pre>
   */
  private void initializeFromMetadata() {
    TIFFField f;

    // Compression
    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_COMPRESSION);
    if (f == null) {
      processWarningOccurred("Compression field is missing; assuming no compression");
      compression = BaselineTIFFTagSet.COMPRESSION_NONE;
    } else {
      compression = f.getAsInt(0);
    }

    // Whether key dimensional information is absent.
    boolean isMissingDimension = false;

    // ImageWidth -> width
    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_IMAGE_WIDTH);
    if (f != null) {
      this.width = f.getAsInt(0);
    } else {
      processWarningOccurred("ImageWidth field is missing.");
      isMissingDimension = true;
    }

    // ImageLength -> height
    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_IMAGE_LENGTH);
    if (f != null) {
      this.height = f.getAsInt(0);
    } else {
      processWarningOccurred("ImageLength field is missing.");
      isMissingDimension = true;
    }

    // SamplesPerPixel
    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_SAMPLES_PER_PIXEL);
    if (f != null) {
      samplesPerPixel = f.getAsInt(0);
    } else {
      samplesPerPixel = 1;
      isMissingDimension = true;
    }

    // If any dimension is missing and there is a JPEG stream available
    // get the information from it.
    int defaultBitDepth = 1;
    if (isMissingDimension
        && (f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_JPEG_INTERCHANGE_FORMAT))
            != null) {
      Iterator iter = ImageIO.getImageReadersByFormatName("JPEG");
      if (iter != null && iter.hasNext()) {
        ImageReader jreader = (ImageReader) iter.next();
        try {
          stream.mark();
          stream.seek(f.getAsLong(0));
          jreader.setInput(stream);
          if (imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_IMAGE_WIDTH) == null) {
            this.width = jreader.getWidth(0);
          }
          if (imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_IMAGE_LENGTH) == null) {
            this.height = jreader.getHeight(0);
          }
          ImageTypeSpecifier imageType = jreader.getRawImageType(0);
          if (imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_SAMPLES_PER_PIXEL) == null) {
            this.samplesPerPixel = imageType.getSampleModel().getNumBands();
          }
          stream.reset();
          defaultBitDepth = imageType.getColorModel().getComponentSize(0);
        } catch (IOException e) {
          // Ignore it and proceed: an error will occur later.
        }
        jreader.dispose();
      }
    }

    if (samplesPerPixel < 1) {
      processWarningOccurred("Samples per pixel < 1!");
    }

    // SamplesPerPixel -> numBands
    numBands = samplesPerPixel;

    // ColorMap
    this.colorMap = null;
    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_COLOR_MAP);
    if (f != null) {
      // Grab color map
      colorMap = f.getAsChars();
    }

    // PhotometricInterpretation
    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_PHOTOMETRIC_INTERPRETATION);
    if (f == null) {
      if (compression == BaselineTIFFTagSet.COMPRESSION_CCITT_RLE
          || compression == BaselineTIFFTagSet.COMPRESSION_CCITT_T_4
          || compression == BaselineTIFFTagSet.COMPRESSION_CCITT_T_6) {
        processWarningOccurred(
            "PhotometricInterpretation field is missing; " + "assuming WhiteIsZero");
        photometricInterpretation = BaselineTIFFTagSet.PHOTOMETRIC_INTERPRETATION_WHITE_IS_ZERO;
      } else if (this.colorMap != null) {
        photometricInterpretation = BaselineTIFFTagSet.PHOTOMETRIC_INTERPRETATION_PALETTE_COLOR;
      } else if (samplesPerPixel == 3 || samplesPerPixel == 4) {
        photometricInterpretation = BaselineTIFFTagSet.PHOTOMETRIC_INTERPRETATION_RGB;
      } else {
        processWarningOccurred(
            "PhotometricInterpretation field is missing; " + "assuming BlackIsZero");
        photometricInterpretation = BaselineTIFFTagSet.PHOTOMETRIC_INTERPRETATION_BLACK_IS_ZERO;
      }
    } else {
      photometricInterpretation = f.getAsInt(0);
    }

    // SampleFormat
    boolean replicateFirst = false;
    int first = -1;

    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_SAMPLE_FORMAT);
    sampleFormat = new int[samplesPerPixel];
    replicateFirst = false;
    if (f == null) {
      replicateFirst = true;
      first = BaselineTIFFTagSet.SAMPLE_FORMAT_UNDEFINED;
    } else if (f.getCount() != samplesPerPixel) {
      replicateFirst = true;
      first = f.getAsInt(0);
    }

    for (int i = 0; i < samplesPerPixel; i++) {
      sampleFormat[i] = replicateFirst ? first : f.getAsInt(i);
      if (sampleFormat[i] != BaselineTIFFTagSet.SAMPLE_FORMAT_UNSIGNED_INTEGER
          && sampleFormat[i] != BaselineTIFFTagSet.SAMPLE_FORMAT_SIGNED_INTEGER
          && sampleFormat[i] != BaselineTIFFTagSet.SAMPLE_FORMAT_FLOATING_POINT
          && sampleFormat[i] != BaselineTIFFTagSet.SAMPLE_FORMAT_UNDEFINED) {
        processWarningOccurred("Illegal value for SAMPLE_FORMAT, assuming SAMPLE_FORMAT_UNDEFINED");
        sampleFormat[i] = BaselineTIFFTagSet.SAMPLE_FORMAT_UNDEFINED;
      }
    }

    // BitsPerSample
    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_BITS_PER_SAMPLE);
    this.bitsPerSample = new int[samplesPerPixel];
    replicateFirst = false;
    if (f == null) {
      replicateFirst = true;
      first = defaultBitDepth;
    } else if (f.getCount() != samplesPerPixel) {
      replicateFirst = true;
      first = f.getAsInt(0);
    }

    for (int i = 0; i < samplesPerPixel; i++) {
      // Replicate initial value if not enough values provided
      bitsPerSample[i] = replicateFirst ? first : f.getAsInt(i);

      if (DEBUG) {
        System.out.println("bitsPerSample[" + i + "] = " + bitsPerSample[i]);
      }
    }

    // ExtraSamples
    this.extraSamples = null;
    f = imageMetadata.getTIFFField(BaselineTIFFTagSet.TAG_EXTRA_SAMPLES);
    if (f != null) {
      extraSamples = f.getAsInts();
    }

    //         System.out.println("colorMap = " + colorMap);
    //         if (colorMap != null) {
    //             for (int i = 0; i < colorMap.length; i++) {
    //              System.out.println("colorMap[" + i + "] = " + (int)(colorMap[i]));
    //             }
    //         }

  }
Example #16
  private void initializeRead(int imageIndex, J2KImageReadParamJava param, J2KMetadata metadata) {
    try {
      iis.mark();
      in = new IISRandomAccessIO(iis);

      // **** File Format ****
      // If the codestream is wrapped in the jp2 fileformat, Read the
      // file format wrapper
      ff = new FileFormatReader(in, metadata);
      ff.readFileFormat();
      in.seek(ff.getFirstCodeStreamPos());

      hi = new HeaderInfo();
      try {
        hd = new HeaderDecoder(in, j2krparam, hi);
      } catch (EOFException e) {
        throw new RuntimeException(I18N.getString("J2KReadState2"));
      } catch (IOException ioe) {
        throw new RuntimeException(ioe);
      }

      this.width = hd.getImgWidth();
      this.height = hd.getImgHeight();

      Rectangle sourceRegion = param.getSourceRegion();
      sourceOrigin = new Point();
      sourceRegion = new Rectangle(hd.getImgULX(), hd.getImgULY(), this.width, this.height);

      // Verify that the subsampling rates are consistent across all components.
      boolean compConsistent = true;
      stepX = hd.getCompSubsX(0);
      stepY = hd.getCompSubsY(0);
      for (int i = 1; i < nComp; i++) {
        if (stepX != hd.getCompSubsX(i) || stepY != hd.getCompSubsY(i))
          throw new RuntimeException(I18N.getString("J2KReadState12"));
      }

      // Get minimum number of resolution levels available across
      // all tile-components.
      int minResLevels = hd.getDecoderSpecs().dls.getMin();

      // Set current resolution level.
      this.resolution = param != null ? param.getResolution() : minResLevels;
      if (resolution < 0 || resolution > minResLevels) {
        resolution = minResLevels;
      }

      // Convert source region to lower resolution level.
      if (resolution != minResLevels || stepX != 1 || stepY != 1) {
        sourceRegion =
            J2KImageReader.getReducedRect(sourceRegion, minResLevels, resolution, stepX, stepY);
      }

      destinationRegion = (Rectangle) sourceRegion.clone();

      J2KImageReader.computeRegionsWrapper(
          param,
          false,
          this.width,
          this.height,
          param.getDestination(),
          sourceRegion,
          destinationRegion);

      sourceOrigin = new Point(sourceRegion.x, sourceRegion.y);
      scaleX = param.getSourceXSubsampling();
      scaleY = param.getSourceYSubsampling();
      xOffset = param.getSubsamplingXOffset();
      yOffset = param.getSubsamplingYOffset();

      this.width = destinationRegion.width;
      this.height = destinationRegion.height;

      Point tileOffset = hd.getTilingOrigin(null);

      this.tileWidth = hd.getNomTileWidth();
      this.tileHeight = hd.getNomTileHeight();

      // Convert tile 0 to lower resolution level.
      if (resolution != minResLevels || stepX != 1 || stepY != 1) {
        Rectangle tileRect = new Rectangle(tileOffset);
        tileRect.width = tileWidth;
        tileRect.height = tileHeight;
        tileRect = J2KImageReader.getReducedRect(tileRect, minResLevels, resolution, stepX, stepY);
        tileOffset = tileRect.getLocation();
        tileWidth = tileRect.width;
        tileHeight = tileRect.height;
      }

      tileXOffset = tileOffset.x;
      tileYOffset = tileOffset.y;

      // Set the tile step sizes. These values are used because it
      // is possible that tiles will be empty. In particular at lower
      // resolution levels when subsampling is used this may be the
      // case. This method of calculation will work at least for
      // Profile-0 images.
      if (tileWidth * (1 << (minResLevels - resolution)) * stepX > hd.getNomTileWidth()) {
        tileStepX =
            (tileWidth * (1 << (minResLevels - resolution)) * stepX + hd.getNomTileWidth() - 1)
                / hd.getNomTileWidth();
      } else {
        tileStepX = 1;
      }

      if (tileHeight * (1 << (minResLevels - resolution)) * stepY > hd.getNomTileHeight()) {
        tileStepY =
            (tileHeight * (1 << (minResLevels - resolution)) * stepY + hd.getNomTileHeight() - 1)
                / hd.getNomTileHeight();
      } else {
        tileStepY = 1;
      }

      if (!destinationRegion.equals(sourceRegion)) noTransform = false;

      // **** Header decoder ****
      // Instantiate header decoder and read main header
      decSpec = hd.getDecoderSpecs();

      // **** Instantiate decoding chain ****
      // Get demixed bitdepths
      nComp = hd.getNumComps();

      int[] depth = new int[nComp];
      for (int i = 0; i < nComp; i++) depth[i] = hd.getOriginalBitDepth(i);

      // Get channel mapping
      ChannelDefinitionBox cdb = null;
      if (metadata != null)
        cdb = (ChannelDefinitionBox) metadata.getElement("JPEG2000ChannelDefinitionBox");

      channelMap = new int[nComp];
      if (cdb != null && metadata.getElement("JPEG2000PaletteBox") == null) {
        short[] assoc = cdb.getAssociation();
        short[] types = cdb.getTypes();
        short[] channels = cdb.getChannel();

        for (int i = 0; i < types.length; i++)
          if (types[i] == 0) channelMap[channels[i]] = assoc[i] - 1;
          else if (types[i] == 1 || types[i] == 2) channelMap[channels[i]] = channels[i];
      } else {
        for (int i = 0; i < nComp; i++) channelMap[i] = i;
      }

      // **** Bitstream reader ****
      try {
        boolean logJJ2000Messages = Boolean.getBoolean("jj2000.j2k.decoder.log");
        breader =
            BitstreamReaderAgent.createInstance(in, hd, j2krparam, decSpec, logJJ2000Messages, hi);
      } catch (IOException e) {
        throw new RuntimeException(
            I18N.getString("J2KReadState3")
                + " "
                + ((e.getMessage() != null) ? (":\n" + e.getMessage()) : ""));
      } catch (IllegalArgumentException e) {
        throw new RuntimeException(
            I18N.getString("J2KReadState4")
                + " "
                + ((e.getMessage() != null) ? (":\n" + e.getMessage()) : ""));
      }

      // **** Entropy decoder ****
      try {
        entdec = hd.createEntropyDecoder(breader, j2krparam);
      } catch (IllegalArgumentException e) {
        throw new RuntimeException(
            I18N.getString("J2KReadState5")
                + " "
                + ((e.getMessage() != null) ? (":\n" + e.getMessage()) : ""));
      }

      // **** ROI de-scaler ****
      try {
        roids = hd.createROIDeScaler(entdec, j2krparam, decSpec);
      } catch (IllegalArgumentException e) {
        throw new RuntimeException(
            I18N.getString("J2KReadState6")
                + " "
                + ((e.getMessage() != null) ? (":\n" + e.getMessage()) : ""));
      }

      // **** Dequantizer ****
      try {
        deq = hd.createDequantizer(roids, depth, decSpec);
      } catch (IllegalArgumentException e) {
        throw new RuntimeException(
            I18N.getString("J2KReadState7")
                + " "
                + ((e.getMessage() != null) ? (":\n" + e.getMessage()) : ""));
      }

      // **** Inverse wavelet transform ***
      try {
        // full page inverse wavelet transform
        invWT = InverseWT.createInstance(deq, decSpec);
      } catch (IllegalArgumentException e) {
        throw new RuntimeException(
            I18N.getString("J2KReadState8")
                + " "
                + ((e.getMessage() != null) ? (":\n" + e.getMessage()) : ""));
      }

      int res = breader.getImgRes();
      int mrl = decSpec.dls.getMin();
      invWT.setImgResLevel(res);

      // **** Data converter **** (after inverse transform module)
      converter = new ImgDataConverter(invWT, 0);

      // **** Inverse component transformation ****
      ictransf = new InvCompTransf(converter, decSpec, depth);

      // If source bands were specified in the read param, use them;
      // otherwise default to all components.
      sourceBands = j2krparam.getSourceBands();

      if (sourceBands == null) {
        sourceBands = new int[nComp];
        for (int i = 0; i < nComp; i++) sourceBands[i] = i;
      }

      nComp = sourceBands.length;

      destinationBands = j2krparam.getDestinationBands();
      if (destinationBands == null) {
        destinationBands = new int[nComp];
        for (int i = 0; i < nComp; i++) destinationBands[i] = i;
      }

      J2KImageReader.checkReadParamBandSettingsWrapper(
          param, hd.getNumComps(), destinationBands.length);

      levelShift = new int[nComp];
      minValues = new int[nComp];
      maxValues = new int[nComp];
      fracBits = new int[nComp];
      dataBlocks = new DataBlkInt[nComp];

      depth = new int[nComp];
      bandOffsets = new int[nComp];
      maxDepth = 0;
      isSigned = false;
      for (int i = 0; i < nComp; i++) {
        depth[i] = hd.getOriginalBitDepth(sourceBands[i]);
        if (depth[i] > maxDepth) maxDepth = depth[i];
        dataBlocks[i] = new DataBlkInt();

        // XXX: may need to change if ChannelDefinition is used to
        // define the color channels, such as BGR order
        bandOffsets[i] = i;
        if (hd.isOriginalSigned(sourceBands[i])) isSigned = true;
        else {
          levelShift[i] = 1 << (ictransf.getNomRangeBits(sourceBands[i]) - 1);
        }

        // Get the number of bits in the image, and decide what the max
        // value should be, depending on whether it is signed or not
        int nomRangeBits = ictransf.getNomRangeBits(sourceBands[i]);
        maxValues[i] = (1 << (isSigned ? (nomRangeBits - 1) : nomRangeBits)) - 1;
        minValues[i] = isSigned ? -(maxValues[i] + 1) : 0;

        fracBits[i] = ictransf.getFixedPoint(sourceBands[i]);
      }

      iis.reset();
    } catch (IllegalArgumentException e) {
      throw new RuntimeException(e.getMessage(), e);
    } catch (Error e) {
      if (e.getMessage() != null) throw new RuntimeException(e.getMessage(), e);
      else {
        throw new RuntimeException(I18N.getString("J2KReadState9"), e);
      }
    } catch (RuntimeException e) {
      if (e.getMessage() != null)
        throw new RuntimeException(I18N.getString("J2KReadState10") + " " + e.getMessage(), e);
      else {
        throw new RuntimeException(I18N.getString("J2KReadState10"), e);
      }
    } catch (Throwable e) {
      throw new RuntimeException(I18N.getString("J2KReadState10"), e);
    }
  }
Example #17
  private void init(
      final BoundingBox granuleBBOX,
      final URL granuleUrl,
      final ImageReaderSpi suggestedSPI,
      final Geometry inclusionGeometry,
      final boolean heterogeneousGranules,
      final boolean handleArtifactsFiltering) {
    this.granuleBBOX = ReferencedEnvelope.reference(granuleBBOX);
    this.granuleUrl = granuleUrl;
    this.inclusionGeometry = inclusionGeometry;
    this.handleArtifactsFiltering = handleArtifactsFiltering;
    filterMe = handleArtifactsFiltering && inclusionGeometry != null;

    // create the base grid to world transformation
    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
      //
      // get info about the raster we have to read
      //

      // get a stream
      if (cachedStreamSPI == null) {
        cachedStreamSPI = ImageIOExt.getImageInputStreamSPI(granuleUrl, true);
        if (cachedStreamSPI == null) {
          final File file = DataUtilities.urlToFile(granuleUrl);
          if (file != null) {
            if (LOGGER.isLoggable(Level.WARNING)) {
              LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
            }
          }
          throw new IllegalArgumentException(
              "Unable to get an input stream for the provided granule " + granuleUrl.toString());
        }
      }
      assert cachedStreamSPI != null : "no cachedStreamSPI available!";
      inStream =
          cachedStreamSPI.createInputStreamInstance(
              granuleUrl, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
      if (inStream == null) {
        final File file = DataUtilities.urlToFile(granuleUrl);
        if (file != null) {
          if (LOGGER.isLoggable(Level.WARNING)) {
            LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
          }
        }
        throw new IllegalArgumentException(
            "Unable to get an input stream for the provided file " + granuleUrl.toString());
      }

      // get a reader and try to cache the suggested SPI first
      if (cachedReaderSPI == null) {
        inStream.mark();
        if (suggestedSPI != null && suggestedSPI.canDecodeInput(inStream)) {
          cachedReaderSPI = suggestedSPI;
          inStream.reset();
        } else {
          inStream.mark();
          reader = ImageIOExt.getImageioReader(inStream);
          if (reader != null) cachedReaderSPI = reader.getOriginatingProvider();
          inStream.reset();
        }
      }
      reader = cachedReaderSPI.createReaderInstance();
      if (reader == null)
        throw new IllegalArgumentException(
            "Unable to get an ImageReader for the provided file " + granuleUrl.toString());
      reader.setInput(inStream);
      // get selected level and base level dimensions
      final Rectangle originalDimension = Utils.getDimension(0, reader);

      // build the g2W for this tile, in principle we should get it
      // somehow from the tile itself or from the index, but at the moment
      // we do not have such info, hence we assume that it is a simple
      // scale and translate
      final GridToEnvelopeMapper geMapper =
          new GridToEnvelopeMapper(new GridEnvelope2D(originalDimension), granuleBBOX);
      geMapper.setPixelAnchor(
          PixelInCell
              .CELL_CENTER); // this is the default behavior but it is nice to write it down anyway
      this.baseGridToWorld = geMapper.createAffineTransform();

      try {
        if (inclusionGeometry != null) {
          geMapper.setPixelAnchor(PixelInCell.CELL_CORNER);
          Geometry mapped = JTS.transform(inclusionGeometry, geMapper.createTransform().inverse());
          this.granuleROIShape = new ROIGeometry(mapped);
        }

      } catch (TransformException e1) {
        throw new IllegalArgumentException(e1);
      }

      // add the base level
      this.granuleLevels.put(
          Integer.valueOf(0),
          new GranuleOverviewLevelDescriptor(
              1, 1, originalDimension.width, originalDimension.height));

      ////////////////////// Setting overviewController ///////////////////////
      if (heterogeneousGranules) {
        // //
        //
        // Right now we are setting up overviewsController by assuming that
        // overviews are internal images as happens in TIFF images
        // We can improve this by leveraging on coverageReaders
        //
        // //

        // Getting the first level descriptor
        final GranuleOverviewLevelDescriptor baseOverviewLevelDescriptor = granuleLevels.get(0);

        // Variables initialization
        final int numberOfOverviews = reader.getNumImages(true) - 1;
        final AffineTransform2D baseG2W = baseOverviewLevelDescriptor.getGridToWorldTransform();
        final int width = baseOverviewLevelDescriptor.getWidth();
        final int height = baseOverviewLevelDescriptor.getHeight();
        final double resX = AffineTransform2D.getScaleX0(baseG2W);
        final double resY = AffineTransform2D.getScaleY0(baseG2W);
        final double[] highestRes = new double[] {resX, resY};
        final double[][] overviewsResolution = new double[numberOfOverviews][2];

        // Populating overviews and initializing overviewsController
        for (int i = 0; i < numberOfOverviews; i++) {
          overviewsResolution[i][0] = (highestRes[0] * width) / reader.getWidth(i + 1);
          overviewsResolution[i][1] = (highestRes[1] * height) / reader.getHeight(i + 1);
        }
        overviewsController =
            new OverviewsController(highestRes, numberOfOverviews, overviewsResolution);
      }
      //////////////////////////////////////////////////////////////////////////

    } catch (IllegalStateException e) {
      throw new IllegalArgumentException(e);

    } catch (IOException e) {
      throw new IllegalArgumentException(e);
    } finally {
      // close/dispose stream and readers
      try {
        if (inStream != null) {
          inStream.close();
        }
      } catch (Throwable e) {
        throw new IllegalArgumentException(e);
      } finally {
        if (reader != null) {
          reader.dispose();
        }
      }
    }
  }
Example #18
  /** {@inheritDoc} */
  @Override
  public Image loadImage(final ImageInfo info, final Map hints, final ImageSessionContext session)
      throws ImageException, IOException {
    if (!MimeConstants.MIME_JPEG.equals(info.getMimeType())) {
      throw new IllegalArgumentException(
          "ImageInfo must be from a image with MIME type: " + MimeConstants.MIME_JPEG);
    }

    ColorSpace colorSpace = null;
    boolean appeFound = false;
    int sofType = 0;
    ByteArrayOutputStream iccStream = null;

    final Source src = session.needSource(info.getOriginalURI());
    final ImageInputStream in = ImageUtil.needImageInputStream(src);
    final JPEGFile jpeg = new JPEGFile(in);
    in.mark();
    try {
      outer:
      while (true) {
        int reclen;
        final int segID = jpeg.readMarkerSegment();
        if (log.isTraceEnabled()) {
          log.trace("Seg Marker: " + Integer.toHexString(segID));
        }
        switch (segID) {
          case EOI:
            log.trace("EOI found. Stopping.");
            break outer;
          case SOS:
            log.trace("SOS found. Stopping early."); // TODO Not sure if
            // this is safe
            break outer;
          case SOI:
          case NULL:
            break;
          case SOF0: // baseline
          case SOF1: // extended sequential DCT
          case SOF2: // progressive (since PDF 1.3)
          case SOFA: // progressive (since PDF 1.3)
            sofType = segID;
            if (log.isTraceEnabled()) {
              log.trace("SOF: " + Integer.toHexString(sofType));
            }
            in.mark();
            try {
              reclen = jpeg.readSegmentLength();
              in.skipBytes(1); // data precision
              in.skipBytes(2); // height
              in.skipBytes(2); // width
              final int numComponents = in.readUnsignedByte();
              if (numComponents == 1) {
                colorSpace = ColorSpace.getInstance(ColorSpace.CS_GRAY);
              } else if (numComponents == 3) {
                colorSpace = ColorSpace.getInstance(ColorSpace.CS_LINEAR_RGB);
              } else if (numComponents == 4) {
                colorSpace = ColorSpaces.getDeviceCMYKColorSpace();
              } else {
                throw new ImageException(
                    "Unsupported ColorSpace for image "
                        + info
                        + ". The number of components supported are 1, 3 and 4.");
              }
            } finally {
              in.reset();
            }
            in.skipBytes(reclen);
            break;
          case APP2: // ICC (see ICC1V42.pdf)
            in.mark();
            try {
              reclen = jpeg.readSegmentLength();
              // Check for ICC profile
              final byte[] iccString = new byte[11];
              in.readFully(iccString);
              in.skipBytes(1); // string terminator (null byte)

              if ("ICC_PROFILE".equals(new String(iccString, "US-ASCII"))) {
                in.skipBytes(2); // chunk sequence number and total
                // number of chunks
                final int payloadSize = reclen - 2 - 12 - 2;
                if (ignoreColorProfile(hints)) {
                  log.debug("Ignoring ICC profile data in JPEG");
                  in.skipBytes(payloadSize);
                } else {
                  final byte[] buf = new byte[payloadSize];
                  in.readFully(buf);
                  if (iccStream == null) {
                    if (log.isDebugEnabled()) {
                      log.debug("JPEG has an ICC profile");
                      final DataInputStream din =
                          new DataInputStream(new ByteArrayInputStream(buf));
                      log.debug("Declared ICC profile size: " + din.readInt());
                    }
                    // ICC profiles can be split into several
                    // chunks
                    // so collect in a byte array output stream
                    iccStream = new ByteArrayOutputStream();
                  }
                  iccStream.write(buf);
                }
              }
            } finally {
              in.reset();
            }
            in.skipBytes(reclen);
            break;
          case APPE: // Adobe-specific (see 5116.DCT_Filter.pdf)
            in.mark();
            try {
              reclen = jpeg.readSegmentLength();
              // Check for Adobe header
              final byte[] adobeHeader = new byte[5];
              in.readFully(adobeHeader);

              if ("Adobe".equals(new String(adobeHeader, "US-ASCII"))) {
                // The reason for reading the APPE marker is that
                // Adobe Photoshop
                // generates CMYK JPEGs with inverted values. The
                // correct thing
                // to do would be to interpret the values in the
                // marker, but for now
                // only assume that if APPE marker is present and
                // colorspace is CMYK,
                // the image is inverted.
                appeFound = true;
              }
            } finally {
              in.reset();
            }
            in.skipBytes(reclen);
            break;
          default:
            jpeg.skipCurrentMarkerSegment();
        }
      }
    } finally {
      in.reset();
    }

    final ICC_Profile iccProfile = buildICCProfile(info, colorSpace, iccStream);
    if (iccProfile == null && colorSpace == null) {
      throw new ImageException("ColorSpace could not be identified for JPEG image " + info);
    }

    boolean invertImage = false;
    if (appeFound && colorSpace.getType() == ColorSpace.TYPE_CMYK) {
      if (log.isDebugEnabled()) {
        log.debug(
            "JPEG has an Adobe APPE marker. Note: CMYK Image will be inverted. ("
                + info.getOriginalURI()
                + ")");
      }
      invertImage = true;
    }

    final ImageRawJPEG rawImage =
        new ImageRawJPEG(
            info, ImageUtil.needInputStream(src), sofType, colorSpace, iccProfile, invertImage);
    return rawImage;
  }