GranuleOverviewLevelDescriptor getLevel(final int index) {
    // load level
    // create the base grid to world transformation
    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        // get a stream
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream =
                cachedStreamSPI.createInputStreamInstance(
                        granuleUrl, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
        if (inStream == null)
            throw new IllegalArgumentException(
                    "Unable to create an inputstream for the granuleurl:"
                            + (granuleUrl != null ? granuleUrl : "null"));

        // get a reader and try to cache the relevant SPI
        if (cachedReaderSPI == null) {
            reader = ImageIOExt.getImageioReader(inStream);
            if (reader != null) cachedReaderSPI = reader.getOriginatingProvider();
        } else reader = cachedReaderSPI.createReaderInstance();
        if (reader == null)
            throw new IllegalArgumentException(
                    "Unable to get an ImageReader for the provided file "
                            + granuleUrl.toString());
        reader.setInput(inStream);

        // call internal method which will close everything
        return getLevel(index, reader);
    } catch (IllegalStateException e) {
        // clean up
        try {
            if (inStream != null) inStream.close();
        } catch (Throwable ee) {
        } finally {
            if (reader != null) reader.dispose();
        }
        throw new IllegalArgumentException(e);
    } catch (IOException e) {
        // clean up
        try {
            if (inStream != null) inStream.close();
        } catch (Throwable ee) {
        } finally {
            if (reader != null) reader.dispose();
        }
        throw new IllegalArgumentException(e);
    }
}
/**
 * Returns the geotiff metadata for this geotiff file.
 *
 * @return the metadata
 */
public GeoTiffIIOMetadataDecoder getMetadata() {
    GeoTiffIIOMetadataDecoder metadata = null;
    ImageReader reader = null;
    boolean closeMe = true;
    ImageInputStream stream = null;
    try {
        if ((source instanceof InputStream) || (source instanceof ImageInputStream)) {
            closeMe = false;
        }
        if (source instanceof ImageInputStream) {
            stream = (ImageInputStream) source;
        } else {
            inStreamSPI = ImageIOExt.getImageInputStreamSPI(source);
            if (inStreamSPI == null) {
                throw new IllegalArgumentException("No input stream for the provided source");
            }
            stream =
                    inStreamSPI.createInputStreamInstance(
                            source, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
        }
        if (stream == null) {
            throw new IllegalArgumentException("No input stream for the provided source");
        }
        stream.mark();
        reader = READER_SPI.createReaderInstance();
        reader.setInput(stream);
        final IIOMetadata iioMetadata = reader.getImageMetadata(0);
        metadata = new GeoTiffIIOMetadataDecoder(iioMetadata);
    } catch (IOException e) {
        if (LOGGER.isLoggable(Level.SEVERE)) {
            LOGGER.log(Level.SEVERE, e.getMessage(), e);
        }
    } finally {
        if (reader != null)
            try {
                reader.dispose();
            } catch (Throwable t) {
            }
        if (stream != null) {
            try {
                stream.reset();
            } catch (Throwable t) {
            }
            if (closeMe) {
                try {
                    stream.close();
                } catch (Throwable t) {
                }
            }
        }
    }
    return metadata;
}
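// A hedged usage sketch for getMetadata(): open a reader on a file and query a couple of the
// decoder methods that the surrounding code relies on (hasNoData/getNoData, hasGeoKey).
// "sample.tif" is an illustrative path, not something referenced elsewhere in this code.
public static void printNoDataExample() throws DataSourceException {
    GeoTiffReader reader = new GeoTiffReader(new File("sample.tif"), null);
    GeoTiffIIOMetadataDecoder metadata = reader.getMetadata();
    if (metadata != null && metadata.hasNoData()) {
        System.out.println("nodata = " + metadata.getNoData());
    }
    System.out.println("has GeoKey directory: " + (metadata != null && metadata.hasGeoKey()));
}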
static {
    // issue 476: appContextProtection is disabled by default in JreMemoryLeakPreventionListener
    // since Tomcat 7.0.42, so protect from sun.awt.AppContext ourselves
    final ClassLoader loader = Thread.currentThread().getContextClassLoader();
    try {
        // Use the system classloader as the victim for this ClassLoader pinning we're about to do.
        Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader());

        // Trigger a call to sun.awt.AppContext.getAppContext().
        // This will pin the system class loader in memory but that shouldn't be an issue.
        ImageIO.getCacheDirectory();
    } catch (final Throwable t) { // NOPMD
        LOG.info("prevention of AppContext ClassLoader leak failed, skipping");
    } finally {
        Thread.currentThread().setContextClassLoader(loader);
    }
}
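// The static block above, the Tomcat lifecycleEvent(...) listener and the CXF doHacks() method
// further down all share one pattern: temporarily switch the thread context class loader (TCCL)
// to the system class loader, run an initializer that would otherwise pin the current TCCL, and
// restore the original loader in a finally block. A minimal, hedged sketch of that pattern as a
// reusable helper (the class name and Runnable-based signature are illustrative, not part of any
// of the libraries quoted here):
final class TcclSafeInit {

    private TcclSafeInit() {}

    /** Runs the given initializer with the system class loader as TCCL, then restores the old TCCL. */
    static void runWithSystemClassLoader(Runnable initializer) {
        final ClassLoader original = Thread.currentThread().getContextClassLoader();
        try {
            Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader());
            initializer.run();
        } catch (Throwable t) {
            // best effort: leak prevention must never break application startup
        } finally {
            Thread.currentThread().setContextClassLoader(original);
        }
    }
}
// Example: TcclSafeInit.runWithSystemClassLoader(javax.imageio.ImageIO::getCacheDirectory);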
/**
 * This method reads in the TIFF image, constructs an appropriate CRS, determines the math
 * transform from raster to the CRS model, and constructs a GridCoverage.
 *
 * @param params currently ignored, potentially may be used for hints.
 * @return grid coverage represented by the image
 * @throws IOException on any IO related troubles
 */
public GridCoverage2D read(GeneralParameterValue[] params) throws IOException {
    GeneralEnvelope requestedEnvelope = null;
    Rectangle dim = null;
    Color inputTransparentColor = null;
    OverviewPolicy overviewPolicy = null;
    int[] suggestedTileSize = null;
    if (params != null) {
        //
        // Checking params
        //
        for (int i = 0; i < params.length; i++) {
            final ParameterValue param = (ParameterValue) params[i];
            final ReferenceIdentifier name = param.getDescriptor().getName();
            if (name.equals(AbstractGridFormat.READ_GRIDGEOMETRY2D.getName())) {
                final GridGeometry2D gg = (GridGeometry2D) param.getValue();
                requestedEnvelope = new GeneralEnvelope((Envelope) gg.getEnvelope2D());
                dim = gg.getGridRange2D().getBounds();
                continue;
            }
            if (name.equals(AbstractGridFormat.OVERVIEW_POLICY.getName())) {
                overviewPolicy = (OverviewPolicy) param.getValue();
                continue;
            }
            if (name.equals(AbstractGridFormat.INPUT_TRANSPARENT_COLOR.getName())) {
                inputTransparentColor = (Color) param.getValue();
                continue;
            }
            if (name.equals(AbstractGridFormat.SUGGESTED_TILE_SIZE.getName())) {
                String suggestedTileSize_ = (String) param.getValue();
                if (suggestedTileSize_ != null && suggestedTileSize_.length() > 0) {
                    suggestedTileSize_ = suggestedTileSize_.trim();
                    int commaPosition = suggestedTileSize_.indexOf(",");
                    if (commaPosition < 0) {
                        int tileDim = Integer.parseInt(suggestedTileSize_);
                        suggestedTileSize = new int[] {tileDim, tileDim};
                    } else {
                        int tileW = Integer.parseInt(suggestedTileSize_.substring(0, commaPosition));
                        int tileH = Integer.parseInt(suggestedTileSize_.substring(commaPosition + 1));
                        suggestedTileSize = new int[] {tileW, tileH};
                    }
                }
                continue;
            }
        }
    }

    //
    // set params
    //
    Integer imageChoice = Integer.valueOf(0);
    final ImageReadParam readP = new ImageReadParam();
    try {
        imageChoice = setReadParams(overviewPolicy, readP, requestedEnvelope, dim);
    } catch (TransformException e) {
        throw new DataSourceException(e);
    }

    //
    // IMAGE READ OPERATION
    //
    Hints newHints = null;
    if (suggestedTileSize != null) {
        newHints = hints.clone();
        final ImageLayout layout = new ImageLayout();
        layout.setTileGridXOffset(0);
        layout.setTileGridYOffset(0);
        layout.setTileHeight(suggestedTileSize[1]);
        layout.setTileWidth(suggestedTileSize[0]);
        newHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout));
    }
    final ParameterBlock pbjRead = new ParameterBlock();
    if (extOvrImgChoice >= 0 && imageChoice >= extOvrImgChoice) {
        pbjRead.add(
                ovrInStreamSPI.createInputStreamInstance(
                        ovrSource, ImageIO.getUseCache(), ImageIO.getCacheDirectory()));
        pbjRead.add(imageChoice - extOvrImgChoice);
    } else {
        pbjRead.add(
                inStreamSPI != null
                        ? inStreamSPI.createInputStreamInstance(
                                source, ImageIO.getUseCache(), ImageIO.getCacheDirectory())
                        : ImageIO.createImageInputStream(source));
        pbjRead.add(imageChoice);
    }
    pbjRead.add(Boolean.FALSE);
    pbjRead.add(Boolean.FALSE);
    pbjRead.add(Boolean.FALSE);
    pbjRead.add(null);
    pbjRead.add(null);
    pbjRead.add(readP);
    pbjRead.add(READER_SPI.createReaderInstance());
    RenderedOp coverageRaster =
            JAI.create("ImageRead", pbjRead, newHints != null ? (RenderingHints) newHints : null);

    //
    // MASKING INPUT COLOR as indicated
    //
    if (inputTransparentColor != null) {
        coverageRaster =
                new ImageWorker(coverageRaster)
                        .setRenderingHints(newHints)
                        .makeColorTransparent(inputTransparentColor)
                        .getRenderedOperation();
    }

    AffineTransform rasterToModel = getRescaledRasterToModel(coverageRaster);
    try {
        return createCoverage(coverageRaster, ProjectiveTransform.create(rasterToModel));
    } catch (Exception e) {
        // dispose and close file
        ImageUtilities.disposePlanarImageChain(coverageRaster);
        // rethrow
        if (e instanceof DataSourceException) {
            throw (DataSourceException) e;
        }
        throw new DataSourceException(e);
    }
}
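// A hedged usage sketch for the read(...) method above: it builds the two parameters this reader
// inspects most often (READ_GRIDGEOMETRY2D and SUGGESTED_TILE_SIZE) and passes them in. The file
// name, the 256x256 raster size and the "512,512" tile size are illustrative values only.
public static GridCoverage2D readExample() throws IOException {
    GeoTiffReader reader = new GeoTiffReader(new File("sample.tif"), null);

    // requested area/resolution: reuse the file's own envelope at a 256x256 raster size
    ParameterValue<GridGeometry2D> gg = AbstractGridFormat.READ_GRIDGEOMETRY2D.createValue();
    gg.setValue(
            new GridGeometry2D(
                    new GridEnvelope2D(new Rectangle(0, 0, 256, 256)),
                    reader.getOriginalEnvelope()));

    // ask the JAI "ImageRead" chain for 512x512 tiles
    ParameterValue<String> tileSize = AbstractGridFormat.SUGGESTED_TILE_SIZE.createValue();
    tileSize.setValue("512,512");

    return reader.read(new GeneralParameterValue[] {gg, tileSize});
}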
/**
 * Collect georeferencing information about this geotiff.
 *
 * @param hints hints used to locate or override the coordinate reference system
 * @throws DataSourceException if the georeferencing information cannot be read
 */
private void getHRInfo(Hints hints) throws DataSourceException {
    ImageReader reader = null;
    ImageReader ovrReader = null;
    ImageInputStream ovrStream = null;
    try {
        //
        // Get a reader for this format
        //
        reader = READER_SPI.createReaderInstance();

        //
        // get the METADATA
        //
        inStream.mark();
        reader.setInput(inStream);
        final IIOMetadata iioMetadata = reader.getImageMetadata(0);
        final GeoTiffIIOMetadataDecoder metadata = new GeoTiffIIOMetadataDecoder(iioMetadata);
        gtcs = new GeoTiffMetadata2CRSAdapter(hints);

        //
        // get the CRS INFO
        //
        final Object tempCRS = this.hints.get(Hints.DEFAULT_COORDINATE_REFERENCE_SYSTEM);
        if (tempCRS != null) {
            this.crs = (CoordinateReferenceSystem) tempCRS;
            if (LOGGER.isLoggable(Level.FINE))
                LOGGER.log(Level.FINE, "Using forced coordinate reference system");
        } else {
            // check external prj first
            crs = getCRS(source);

            // now, if we did not want to override the inner CRS or we did not have any
            // external PRJ at hand, let's look inside the geotiff
            if (!OVERRIDE_INNER_CRS || crs == null) {
                if (metadata.hasGeoKey() && gtcs != null) {
                    crs = gtcs.createCoordinateSystem(metadata);
                }
            }
        }

        //
        // No data
        //
        if (metadata.hasNoData()) {
            noData = metadata.getNoData();
        }

        //
        // parse and set layout
        //
        setLayout(reader);

        //
        // get the dimension of the hr image and build the model as well as
        // computing the resolution
        //
        numOverviews = reader.getNumImages(true) - 1;
        int hrWidth = reader.getWidth(0);
        int hrHeight = reader.getHeight(0);
        final Rectangle actualDim = new Rectangle(0, 0, hrWidth, hrHeight);
        originalGridRange = new GridEnvelope2D(actualDim);

        if (gtcs != null
                && metadata != null
                && (metadata.hasModelTrasformation()
                        || (metadata.hasPixelScales() && metadata.hasTiePoints()))) {
            this.raster2Model = GeoTiffMetadata2CRSAdapter.getRasterToModel(metadata);
        } else {
            // world file
            this.raster2Model = parseWorldFile(source);

            // now world file --> mapinfo?
            if (raster2Model == null) {
                MapInfoFileReader mifReader = parseMapInfoFile(source);
                if (mifReader != null) {
                    raster2Model = mifReader.getTransform();
                    crs = mifReader.getCRS();
                }
            }
        }

        if (crs == null) {
            if (LOGGER.isLoggable(Level.WARNING)) {
                LOGGER.warning("Coordinate Reference System is not available");
            }
            crs = AbstractGridFormat.getDefaultCRS();
        }

        if (this.raster2Model == null) {
            TiePoint[] modelTiePoints = metadata.getModelTiePoints();
            if (modelTiePoints != null && modelTiePoints.length > 1) {
                // use a unit transform and expose the GCPs
                gcps = new GroundControlPoints(Arrays.asList(modelTiePoints), crs);
                raster2Model = ProjectiveTransform.create(new AffineTransform());
                crs = AbstractGridFormat.getDefaultCRS();
            } else {
                throw new DataSourceException("Raster to Model Transformation is not available");
            }
        }

        // create envelope using corner transformation
        final AffineTransform tempTransform = new AffineTransform((AffineTransform) raster2Model);
        tempTransform.concatenate(CoverageUtilities.CENTER_TO_CORNER);
        originalEnvelope =
                CRS.transform(
                        ProjectiveTransform.create(tempTransform), new GeneralEnvelope(actualDim));
        originalEnvelope.setCoordinateReferenceSystem(crs);

        //
        // setting the higher resolution available for this coverage
        //
        highestRes = new double[2];
        highestRes[0] = XAffineTransform.getScaleX0(tempTransform);
        highestRes[1] = XAffineTransform.getScaleY0(tempTransform);

        if (ovrInStreamSPI != null) {
            ovrReader = READER_SPI.createReaderInstance();
            ovrStream =
                    ovrInStreamSPI.createInputStreamInstance(
                            ovrSource, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
            ovrReader.setInput(ovrStream);
            // this index includes the full-resolution image, so we need to add one
            extOvrImgChoice = numOverviews + 1;
            numOverviews = numOverviews + ovrReader.getNumImages(true);
            if (numOverviews < extOvrImgChoice) extOvrImgChoice = -1;
        }

        //
        // get information for the successive images
        //
        if (numOverviews >= 1) {
            overViewResolutions = new double[numOverviews][2];
            // Internal overviews start at 1, so lastInternalOverview matches numOverviews
            // if no external.
            int firstExternalOverview = extOvrImgChoice == -1 ? numOverviews : extOvrImgChoice - 1;
            double spanRes0 = highestRes[0] * this.originalGridRange.getSpan(0);
            double spanRes1 = highestRes[1] * this.originalGridRange.getSpan(1);
            for (int i = 0; i < firstExternalOverview; i++) {
                overViewResolutions[i][0] = spanRes0 / reader.getWidth(i + 1);
                overViewResolutions[i][1] = spanRes1 / reader.getHeight(i + 1);
            }
            for (int i = firstExternalOverview; i < numOverviews; i++) {
                overViewResolutions[i][0] = spanRes0 / ovrReader.getWidth(i - firstExternalOverview);
                overViewResolutions[i][1] = spanRes1 / ovrReader.getHeight(i - firstExternalOverview);
            }
        } else overViewResolutions = null;
    } catch (Throwable e) {
        throw new DataSourceException(e);
    } finally {
        if (reader != null)
            try {
                reader.dispose();
            } catch (Throwable t) {
            }
        if (ovrReader != null)
            try {
                ovrReader.dispose();
            } catch (Throwable t) {
            }
        if (ovrStream != null)
            try {
                ovrStream.close();
            } catch (Throwable t) {
            }
        if (inStream != null)
            try {
                inStream.reset();
            } catch (Throwable t) {
            }
    }
}
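// The envelope above is built by composing the raster-to-model transform with
// CoverageUtilities.CENTER_TO_CORNER, i.e. shifting the mapping from pixel centres to the
// upper-left pixel corner before transforming the grid extent. A minimal, self-contained sketch
// of that half-pixel shift using only java.awt.geom (the 10m pixel size, the origin and the
// 100x100 grid size are made-up values):
public static Rectangle2D cornerEnvelopeExample() {
    // hypothetical grid-to-world mapping for pixel centres: 10m pixels, origin at (1000, 2000)
    AffineTransform centerToModel = new AffineTransform(10, 0, 0, -10, 1000, 2000);

    // equivalent of CENTER_TO_CORNER: translate by half a pixel in grid space
    AffineTransform cornerToModel = new AffineTransform(centerToModel);
    cornerToModel.concatenate(AffineTransform.getTranslateInstance(-0.5, -0.5));

    // transform the full grid extent (e.g. 100x100 pixels) to model space
    Rectangle2D gridExtent = new Rectangle2D.Double(0, 0, 100, 100);
    return cornerToModel.createTransformedShape(gridExtent).getBounds2D();
}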
/**
 * Creates a new instance of GeoTiffReader
 *
 * @param input the GeoTiff file
 * @param uHints user-supplied hints TODO currently are unused
 * @throws DataSourceException if the source cannot be opened or its georeferencing cannot be read
 */
public GeoTiffReader(Object input, Hints uHints) throws DataSourceException {
    super(input, uHints);

    // /////////////////////////////////////////////////////////////////////
    //
    // Set the source being careful in case it is an URL pointing to a file
    //
    // /////////////////////////////////////////////////////////////////////
    try {
        // setting source
        if (input instanceof URL) {
            final URL sourceURL = (URL) input;
            source = DataUtilities.urlToFile(sourceURL);
        }
        closeMe = true;

        // /////////////////////////////////////////////////////////////////////
        //
        // Get a stream in order to read from it for getting the basic
        // information for this coverage
        //
        // /////////////////////////////////////////////////////////////////////
        if ((source instanceof InputStream) || (source instanceof ImageInputStream))
            closeMe = false;
        if (source instanceof ImageInputStream) inStream = (ImageInputStream) source;
        else {
            inStreamSPI = ImageIOExt.getImageInputStreamSPI(source);
            if (inStreamSPI == null)
                throw new IllegalArgumentException("No input stream for the provided source");
            inStream =
                    inStreamSPI.createInputStreamInstance(
                            source, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
        }
        if (inStream == null) {
            // Try to figure out what went wrong and provide some info to the user.
            if (source instanceof File) {
                File f = (File) source;
                if (!f.exists()) {
                    throw new FileNotFoundException(
                            "File " + f.getAbsolutePath() + " does not exist.");
                } else if (f.isDirectory()) {
                    throw new IOException("File " + f.getAbsolutePath() + " is a directory.");
                } else if (!f.canRead()) {
                    throw new IOException("File " + f.getAbsolutePath() + " can not be read.");
                }
            }
            // If we can't give anything more specific, throw the generic error.
            throw new IllegalArgumentException("No input stream for the provided source");
        }

        checkForExternalOverviews();

        // /////////////////////////////////////////////////////////////////////
        //
        // Information about multiple levels and such
        //
        // /////////////////////////////////////////////////////////////////////
        getHRInfo(this.hints);

        // /////////////////////////////////////////////////////////////////////
        //
        // Coverage name
        //
        // /////////////////////////////////////////////////////////////////////
        coverageName = source instanceof File ? ((File) source).getName() : "geotiff_coverage";
        final int dotIndex = coverageName.lastIndexOf('.');
        if (dotIndex != -1 && dotIndex != coverageName.length())
            coverageName = coverageName.substring(0, dotIndex);
    } catch (IOException e) {
        throw new DataSourceException(e);
    } finally {
        // /////////////////////////////////////////////////////////////////////
        //
        // Freeing streams
        //
        // /////////////////////////////////////////////////////////////////////
        if (closeMe && inStream != null)
            try {
                inStream.close();
            } catch (Throwable t) {
            }
    }
}
public void lifecycleEvent(LifecycleEvent event) {
    // Initialise these classes when Tomcat starts
    if (Lifecycle.INIT_EVENT.equals(event.getType())) {

        ClassLoader loader = Thread.currentThread().getContextClassLoader();

        try {
            // Use the system classloader as the victim for all this
            // ClassLoader pinning we're about to do.
            Thread.currentThread().setContextClassLoader(ClassLoader.getSystemClassLoader());

            /*
             * First call to this loads all drivers in the current class
             * loader
             */
            if (driverManagerProtection) {
                DriverManager.getDrivers();
            }

            /*
             * Several components end up calling:
             * sun.awt.AppContext.getAppContext()
             *
             * Those libraries / components known to trigger memory leaks
             * due to eventual calls to getAppContext() are:
             * - Google Web Toolkit via its use of javax.imageio
             * - Tomcat via its use of java.beans.Introspector.flushCaches()
             *   in 1.6.0_15 onwards
             * - others TBD
             */

            // Trigger a call to sun.awt.AppContext.getAppContext(). This
            // will pin the system class loader in memory but that shouldn't
            // be an issue.
            if (appContextProtection) {
                ImageIO.getCacheDirectory();
            }

            // Trigger the creation of the AWT (AWT-Windows, AWT-XAWT,
            // etc.) thread
            if (awtThreadProtection) {
                java.awt.Toolkit.getDefaultToolkit();
            }

            /*
             * Several components end up calling
             * sun.misc.GC.requestLatency(long) which creates a daemon
             * thread without setting the TCCL.
             *
             * Those libraries / components known to trigger memory leaks
             * due to eventual calls to requestLatency(long) are:
             * - javax.management.remote.rmi.RMIConnectorServer.start()
             *
             * Note: Long.MAX_VALUE is a special case that causes the thread
             * to terminate
             */
            if (gcDaemonProtection) {
                try {
                    Class<?> clazz = Class.forName("sun.misc.GC");
                    Method method =
                            clazz.getDeclaredMethod("requestLatency", new Class[] {long.class});
                    method.invoke(null, Long.valueOf(Long.MAX_VALUE - 1));
                } catch (ClassNotFoundException e) {
                    if (System.getProperty("java.vendor").startsWith("Sun")) {
                        log.error(sm.getString("jreLeakListener.gcDaemonFail"), e);
                    } else {
                        log.debug(sm.getString("jreLeakListener.gcDaemonFail"), e);
                    }
                } catch (SecurityException e) {
                    log.error(sm.getString("jreLeakListener.gcDaemonFail"), e);
                } catch (NoSuchMethodException e) {
                    log.error(sm.getString("jreLeakListener.gcDaemonFail"), e);
                } catch (IllegalArgumentException e) {
                    log.error(sm.getString("jreLeakListener.gcDaemonFail"), e);
                } catch (IllegalAccessException e) {
                    log.error(sm.getString("jreLeakListener.gcDaemonFail"), e);
                } catch (InvocationTargetException e) {
                    log.error(sm.getString("jreLeakListener.gcDaemonFail"), e);
                }
            }

            /*
             * Calling getPolicy retains a static reference to the context
             * class loader.
             */
            if (securityPolicyProtection) {
                try {
                    // Policy.getPolicy();
                    Class<?> policyClass = Class.forName("javax.security.auth.Policy");
                    Method method = policyClass.getMethod("getPolicy");
                    method.invoke(null);
                } catch (ClassNotFoundException e) {
                    // Ignore. The class is deprecated.
                } catch (SecurityException e) {
                    // Ignore. Don't need call to getPolicy() to be
                    // successful, just need to trigger static initializer.
                } catch (NoSuchMethodException e) {
                    log.warn(sm.getString("jreLeakListener.authPolicyFail"), e);
                } catch (IllegalArgumentException e) {
                    log.warn(sm.getString("jreLeakListener.authPolicyFail"), e);
                } catch (IllegalAccessException e) {
                    log.warn(sm.getString("jreLeakListener.authPolicyFail"), e);
                } catch (InvocationTargetException e) {
                    log.warn(sm.getString("jreLeakListener.authPolicyFail"), e);
                }
            }

            /*
             * Initializing javax.security.auth.login.Configuration retains a static reference to
             * the context class loader.
             */
            if (securityLoginConfigurationProtection) {
                try {
                    Class.forName(
                            "javax.security.auth.login.Configuration",
                            true,
                            ClassLoader.getSystemClassLoader());
                } catch (ClassNotFoundException e) {
                    // Ignore
                }
            }

            /*
             * Creating a MessageDigest during web application startup
             * initializes the Java Cryptography Architecture. Under certain
             * conditions this starts a Token poller thread with TCCL equal
             * to the web application class loader.
             *
             * Instead we initialize JCA right now.
             */
            if (tokenPollerProtection) {
                java.security.Security.getProviders();
            }

            /*
             * Several components end up opening JarURLConnections without
             * first disabling caching. This effectively locks the file.
             * Whilst more noticeable and harder to ignore on Windows, it
             * affects all operating systems.
             *
             * Those libraries/components known to trigger this issue
             * include:
             * - log4j versions 1.2.15 and earlier
             * - javax.xml.bind.JAXBContext.newInstance()
             */
            // Set the default URL caching policy to not to cache
            if (urlCacheProtection) {
                try {
                    // Doesn't matter that this JAR doesn't exist - just as
                    // long as the URL is well-formed
                    URL url = new URL("jar:file://dummy.jar!/");
                    URLConnection uConn = url.openConnection();
                    uConn.setDefaultUseCaches(false);
                } catch (MalformedURLException e) {
                    log.error(sm.getString("jreLeakListener.jarUrlConnCacheFail"), e);
                } catch (IOException e) {
                    log.error(sm.getString("jreLeakListener.jarUrlConnCacheFail"), e);
                }
            }

            /*
             * Haven't got to the root of what is going on with this leak
             * but if a web app is the first to make the calls below the web
             * application class loader will be pinned in memory.
             */
            if (xmlParsingProtection) {
                DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                try {
                    factory.newDocumentBuilder();
                } catch (ParserConfigurationException e) {
                    log.error(sm.getString("jreLeakListener.xmlParseFail"), e);
                }
            }

            if (ldapPoolProtection) {
                try {
                    Class.forName("com.sun.jndi.ldap.LdapPoolManager");
                } catch (ClassNotFoundException e) {
                    if (System.getProperty("java.vendor").startsWith("Sun")) {
                        log.error(sm.getString("jreLeakListener.ldapPoolManagerFail"), e);
                    } else {
                        log.debug(sm.getString("jreLeakListener.ldapPoolManagerFail"), e);
                    }
                }
            }

            if (classesToInitialize != null) {
                StringTokenizer strTok = new StringTokenizer(classesToInitialize, ", \r\n\t");
                while (strTok.hasMoreTokens()) {
                    String classNameToLoad = strTok.nextToken();
                    try {
                        Class.forName(classNameToLoad);
                    } catch (ClassNotFoundException e) {
                        log.error(
                                sm.getString(
                                        "jreLeakListener.classToInitializeFail", classNameToLoad),
                                e);
                        // continue with next class to load
                    }
                }
            }
        } finally {
            Thread.currentThread().setContextClassLoader(loader);
        }
    }
}
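// The listener above is driven by the per-protection boolean fields (appContextProtection,
// urlCacheProtection, classesToInitialize, ...) that Tomcat populates from attributes of the
// <Listener> element in server.xml. A hedged configuration sketch; the attribute values and the
// example class name are illustrative, and defaults vary by Tomcat version (for instance
// appContextProtection is off by default since 7.0.42, as noted earlier):
//
//   <Listener className="org.apache.catalina.core.JreMemoryLeakPreventionListener"
//             appContextProtection="true"
//             urlCacheProtection="true"
//             classesToInitialize="com.example.SomeClassWithStaticInit" />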
/**
 * Load a specified raster as a portion of the granule described by this {@link
 * GranuleDescriptor}.
 *
 * @param imageReadParameters the {@link ImageReadParam} to use for reading.
 * @param index the index to use for the {@link ImageReader}.
 * @param cropBBox the bbox to use for cropping.
 * @param mosaicWorldToGrid the world-to-grid transform of the destination mosaic.
 * @param request the incoming request to satisfy.
 * @param hints {@link Hints} to be used for creating this raster.
 * @return a specified raster as a portion of the granule described by this {@link
 *     GranuleDescriptor}.
 * @throws IOException in case an error occurs.
 */
public GranuleLoadingResult loadRaster(
        final ImageReadParam imageReadParameters,
        final int index,
        final ReferencedEnvelope cropBBox,
        final MathTransform2D mosaicWorldToGrid,
        final RasterLayerRequest request,
        final Hints hints)
        throws IOException {

    if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
        final String name = Thread.currentThread().getName();
        LOGGER.finer(
                "Thread:" + name + " Loading raster data for granuleDescriptor " + this.toString());
    }

    ImageReadParam readParameters = null;
    int imageIndex;
    final ReferencedEnvelope bbox =
            inclusionGeometry != null
                    ? new ReferencedEnvelope(
                            granuleBBOX.intersection(inclusionGeometry.getEnvelopeInternal()),
                            granuleBBOX.getCoordinateReferenceSystem())
                    : granuleBBOX;
    boolean doFiltering = false;
    if (filterMe) {
        doFiltering = Utils.areaIsDifferent(inclusionGeometry, baseGridToWorld, granuleBBOX);
    }

    // intersection of this tile bound with the current crop bbox
    final ReferencedEnvelope intersection =
            new ReferencedEnvelope(
                    bbox.intersection(cropBBox), cropBBox.getCoordinateReferenceSystem());
    if (intersection.isEmpty()) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
            LOGGER.fine(
                    new StringBuilder("Got empty intersection for granule ")
                            .append(this.toString())
                            .append(" with request ")
                            .append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result")
                            .toString());
        }
        return null;
    }

    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        //
        // get info about the raster we have to read
        //

        // get a stream
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream =
                cachedStreamSPI.createInputStreamInstance(
                        granuleUrl, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
        if (inStream == null) return null;

        // get a reader and try to cache the relevant SPI
        if (cachedReaderSPI == null) {
            reader = ImageIOExt.getImageioReader(inStream);
            if (reader != null) cachedReaderSPI = reader.getOriginatingProvider();
        } else reader = cachedReaderSPI.createReaderInstance();
        if (reader == null) {
            if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
                LOGGER.warning(
                        new StringBuilder("Unable to get a reader for granuleDescriptor ")
                                .append(this.toString())
                                .append(" with request ")
                                .append(request.toString())
                                .append(" Resulting in no granule loaded: Empty result")
                                .toString());
            }
            return null;
        }
        // set input
        reader.setInput(inStream);

        // Checking for heterogeneous granules
        if (request.isHeterogeneousGranules()) {
            // create read parameters
            readParameters = new ImageReadParam();

            // override the overviews controller for the base layer
            imageIndex =
                    ReadParamsController.setReadParams(
                            request.getRequestedResolution(),
                            request.getOverviewPolicy(),
                            request.getDecimationPolicy(),
                            readParameters,
                            request.rasterManager,
                            overviewsController);
        } else {
            imageIndex = index;
            readParameters = imageReadParameters;
        }

        // get selected level and base level dimensions
        final GranuleOverviewLevelDescriptor selectedlevel = getLevel(imageIndex, reader);

        // now create the crop grid to world which can be used to decide
        // which source area we need to crop in the selected level taking
        // into account the scale factors imposed by the selection of this
        // level together with the base level grid to world transformation
        AffineTransform2D cropWorldToGrid =
                new AffineTransform2D(selectedlevel.gridToWorldTransformCorner);
        cropWorldToGrid = (AffineTransform2D) cropWorldToGrid.inverse();
        // computing the crop source area which lives into the
        // selected level raster space, NOTICE that at the end we need to
        // take into account the fact that we might also decimate therefore
        // we cannot just use the crop grid to world but we need to correct
        // it.
        final Rectangle sourceArea =
                CRS.transform(cropWorldToGrid, intersection).toRectangle2D().getBounds();
        // gutter
        if (selectedlevel.baseToLevelTransform.isIdentity()) sourceArea.grow(2, 2);
        XRectangle2D.intersect(
                sourceArea,
                selectedlevel.rasterDimensions,
                sourceArea); // make sure roundings don't bother us
        // is it empty??
        if (sourceArea.isEmpty()) {
            if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
                LOGGER.fine(
                        "Got empty area for granuleDescriptor "
                                + this.toString()
                                + " with request "
                                + request.toString()
                                + " Resulting in no granule loaded: Empty result");
            }
            return null;
        } else if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
            LOGGER.finer(
                    "Loading level "
                            + imageIndex
                            + " with source region: "
                            + sourceArea
                            + " subsampling: "
                            + readParameters.getSourceXSubsampling()
                            + ","
                            + readParameters.getSourceYSubsampling()
                            + " for granule:"
                            + granuleUrl);
        }

        // Setting subsampling
        int newSubSamplingFactor = 0;
        final String pluginName = cachedReaderSPI.getPluginClassName();
        if (pluginName != null && pluginName.equals(ImageUtilities.DIRECT_KAKADU_PLUGIN)) {
            final int ssx = readParameters.getSourceXSubsampling();
            final int ssy = readParameters.getSourceYSubsampling();
            newSubSamplingFactor = ImageIOUtilities.getSubSamplingFactor2(ssx, ssy);
            if (newSubSamplingFactor != 0) {
                if (newSubSamplingFactor > maxDecimationFactor && maxDecimationFactor != -1) {
                    newSubSamplingFactor = maxDecimationFactor;
                }
                readParameters.setSourceSubsampling(newSubSamplingFactor, newSubSamplingFactor, 0, 0);
            }
        }

        // set the source region
        readParameters.setSourceRegion(sourceArea);
        final RenderedImage raster;
        try {
            // read
            raster =
                    request.getReadType()
                            .read(
                                    readParameters,
                                    imageIndex,
                                    granuleUrl,
                                    selectedlevel.rasterDimensions,
                                    reader,
                                    hints,
                                    false);
        } catch (Throwable e) {
            if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
                LOGGER.log(
                        java.util.logging.Level.FINE,
                        "Unable to load raster for granuleDescriptor "
                                + this.toString()
                                + " with request "
                                + request.toString()
                                + " Resulting in no granule loaded: Empty result",
                        e);
            }
            return null;
        }

        // use fixed source area
        sourceArea.setRect(readParameters.getSourceRegion());

        //
        // setting new coefficients to define a new affineTransformation
        // to be applied to the grid to world transformation
        // -----------------------------------------------------------------------------------
        //
        // With respect to the original envelope, the obtained planarImage
        // needs to be rescaled. The scaling factors are computed as the
        // ratio between the cropped source region sizes and the read
        // image sizes.
        //
        // place it in the mosaic using the coords created above;
        double decimationScaleX = ((1.0 * sourceArea.width) / raster.getWidth());
        double decimationScaleY = ((1.0 * sourceArea.height) / raster.getHeight());
        final AffineTransform decimationScaleTranform =
                XAffineTransform.getScaleInstance(decimationScaleX, decimationScaleY);

        // keep into account translation to work into the selected level raster space
        final AffineTransform afterDecimationTranslateTranform =
                XAffineTransform.getTranslateInstance(sourceArea.x, sourceArea.y);

        // now we need to go back to the base level raster space
        final AffineTransform backToBaseLevelScaleTransform = selectedlevel.baseToLevelTransform;

        // now create the overall transform
        final AffineTransform finalRaster2Model = new AffineTransform(baseGridToWorld);
        finalRaster2Model.concatenate(CoverageUtilities.CENTER_TO_CORNER);
        final double x = finalRaster2Model.getTranslateX();
        final double y = finalRaster2Model.getTranslateY();

        if (!XAffineTransform.isIdentity(backToBaseLevelScaleTransform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(backToBaseLevelScaleTransform);
        if (!XAffineTransform.isIdentity(
                afterDecimationTranslateTranform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(afterDecimationTranslateTranform);
        if (!XAffineTransform.isIdentity(decimationScaleTranform, Utils.AFFINE_IDENTITY_EPS))
            finalRaster2Model.concatenate(decimationScaleTranform);

        // keep into account translation factors to place this tile
        finalRaster2Model.preConcatenate((AffineTransform) mosaicWorldToGrid);
        final Interpolation interpolation = request.getInterpolation();

        // paranoiac check to avoid that JAI freaks out when computing its internal layout on
        // images that are too small
        Rectangle2D finalLayout =
                ImageUtilities.layoutHelper(
                        raster,
                        (float) finalRaster2Model.getScaleX(),
                        (float) finalRaster2Model.getScaleY(),
                        (float) finalRaster2Model.getTranslateX(),
                        (float) finalRaster2Model.getTranslateY(),
                        interpolation);
        if (finalLayout.isEmpty()) {
            if (LOGGER.isLoggable(java.util.logging.Level.INFO))
                LOGGER.info(
                        "Unable to create a granuleDescriptor "
                                + this.toString()
                                + " due to jai scale bug creating a null source area");
            return null;
        }

        ROI granuleLoadingShape = null;
        if (granuleROIShape != null) {
            final Point2D translate =
                    mosaicWorldToGrid.transform(new DirectPosition2D(x, y), (Point2D) null);
            AffineTransform tx2 = new AffineTransform();
            tx2.preConcatenate(
                    AffineTransform.getScaleInstance(
                            ((AffineTransform) mosaicWorldToGrid).getScaleX(),
                            -((AffineTransform) mosaicWorldToGrid).getScaleY()));
            tx2.preConcatenate(
                    AffineTransform.getScaleInstance(
                            ((AffineTransform) baseGridToWorld).getScaleX(),
                            -((AffineTransform) baseGridToWorld).getScaleY()));
            tx2.preConcatenate(
                    AffineTransform.getTranslateInstance(translate.getX(), translate.getY()));
            granuleLoadingShape = (ROI) granuleROIShape.transform(tx2);
        }

        // apply the affine transform conserving indexed color model
        final RenderingHints localHints =
                new RenderingHints(
                        JAI.KEY_REPLACE_INDEX_COLOR_MODEL,
                        interpolation instanceof InterpolationNearest ? Boolean.FALSE : Boolean.TRUE);
        if (XAffineTransform.isIdentity(finalRaster2Model, Utils.AFFINE_IDENTITY_EPS)) {
            return new GranuleLoadingResult(raster, granuleLoadingShape, granuleUrl, doFiltering);
        } else {
            //
            // In case we are asked to use certain tile dimensions we tile
            // also at this stage in case the read type is Direct since
            // buffered images comes up untiled and this can affect the
            // performances of the subsequent affine operation.
            //
            final Dimension tileDimensions = request.getTileDimensions();
            if (tileDimensions != null && request.getReadType().equals(ReadType.DIRECT_READ)) {
                final ImageLayout layout = new ImageLayout();
                layout.setTileHeight(tileDimensions.width).setTileWidth(tileDimensions.height);
                localHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout));
            } else {
                if (hints != null && hints.containsKey(JAI.KEY_IMAGE_LAYOUT)) {
                    final Object layout = hints.get(JAI.KEY_IMAGE_LAYOUT);
                    if (layout != null && layout instanceof ImageLayout) {
                        localHints.add(
                                new RenderingHints(
                                        JAI.KEY_IMAGE_LAYOUT, ((ImageLayout) layout).clone()));
                    }
                }
            }
            if (hints != null && hints.containsKey(JAI.KEY_TILE_CACHE)) {
                final Object cache = hints.get(JAI.KEY_TILE_CACHE);
                if (cache != null && cache instanceof TileCache)
                    localHints.add(new RenderingHints(JAI.KEY_TILE_CACHE, (TileCache) cache));
            }
            if (hints != null && hints.containsKey(JAI.KEY_TILE_SCHEDULER)) {
                final Object scheduler = hints.get(JAI.KEY_TILE_SCHEDULER);
                if (scheduler != null && scheduler instanceof TileScheduler)
                    localHints.add(
                            new RenderingHints(JAI.KEY_TILE_SCHEDULER, (TileScheduler) scheduler));
            }
            boolean addBorderExtender = true;
            if (hints != null && hints.containsKey(JAI.KEY_BORDER_EXTENDER)) {
                final Object extender = hints.get(JAI.KEY_BORDER_EXTENDER);
                if (extender != null && extender instanceof BorderExtender) {
                    localHints.add(
                            new RenderingHints(JAI.KEY_BORDER_EXTENDER, (BorderExtender) extender));
                    addBorderExtender = false;
                }
            }
            // border extender
            if (addBorderExtender) {
                localHints.add(ImageUtilities.BORDER_EXTENDER_HINTS);
            }

            // boolean hasScaleX=!(Math.abs(finalRaster2Model.getScaleX()-1) <
            //     1E-2/(raster.getWidth()+1-raster.getMinX()));
            // boolean hasScaleY=!(Math.abs(finalRaster2Model.getScaleY()-1) <
            //     1E-2/(raster.getHeight()+1-raster.getMinY()));
            // boolean hasShearX=!(finalRaster2Model.getShearX() == 0.0);
            // boolean hasShearY=!(finalRaster2Model.getShearY() == 0.0);
            // boolean hasTranslateX=!(Math.abs(finalRaster2Model.getTranslateX()) < 0.01F);
            // boolean hasTranslateY=!(Math.abs(finalRaster2Model.getTranslateY()) < 0.01F);
            // boolean isTranslateXInt=!(Math.abs(finalRaster2Model.getTranslateX() -
            //     (int) finalRaster2Model.getTranslateX()) < 0.01F);
            // boolean isTranslateYInt=!(Math.abs(finalRaster2Model.getTranslateY() -
            //     (int) finalRaster2Model.getTranslateY()) < 0.01F);
            //
            // boolean isIdentity = finalRaster2Model.isIdentity() &&
            //     !hasScaleX&&!hasScaleY &&!hasTranslateX&&!hasTranslateY;
            //
            // TODO how can we check that a skew is harmless????
            // if(isIdentity){
            //     // TODO check if we are missing anything like tiling or such that comes from hints
            //     return new GranuleLoadingResult(raster, granuleLoadingShape, granuleUrl, doFiltering);
            // }
            //
            // // TOLERANCE ON PIXELS SIZE
            //
            // Check and see if the affine transform is in fact doing
            // a Translate operation. That is a scale by 1 and no rotation.
            // In which case call translate. Note that only integer translate
            // is applicable. For non-integer translate we'll have to do the
            // affine.
            // If the hints contain an ImageLayout hint, we can't use
            // TranslateIntOpImage since it isn't capable of dealing with that.
            // Get ImageLayout from renderHints if any.
            // ImageLayout layout = RIFUtil.getImageLayoutHint(localHints);
            // if ( !hasScaleX &&
            //      !hasScaleY &&
            //      !hasShearX&&
            //      !hasShearY&&
            //      isTranslateXInt&&
            //      isTranslateYInt&&
            //      layout == null) {
            //     // It's an integer translate
            //     return new GranuleLoadingResult(new TranslateIntOpImage(raster,
            //             localHints,
            //             (int) finalRaster2Model.getShearX(),
            //             (int) finalRaster2Model.getShearY()), granuleLoadingShape, granuleUrl, doFiltering);
            // }

            ImageWorker iw = new ImageWorker(raster);
            iw.setRenderingHints(localHints);
            iw.affine(finalRaster2Model, interpolation, request.getBackgroundValues());
            return new GranuleLoadingResult(
                    iw.getRenderedImage(), granuleLoadingShape, granuleUrl, doFiltering);
        }
    } catch (IllegalStateException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(
                    java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ")
                            .append(this.toString())
                            .append(" with request ")
                            .append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result")
                            .toString(),
                    e);
        }
        return null;
    } catch (org.opengis.referencing.operation.NoninvertibleTransformException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(
                    java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ")
                            .append(this.toString())
                            .append(" with request ")
                            .append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result")
                            .toString(),
                    e);
        }
        return null;
    } catch (TransformException e) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
            LOGGER.log(
                    java.util.logging.Level.WARNING,
                    new StringBuilder("Unable to load raster for granuleDescriptor ")
                            .append(this.toString())
                            .append(" with request ")
                            .append(request.toString())
                            .append(" Resulting in no granule loaded: Empty result")
                            .toString(),
                    e);
        }
        return null;
    } finally {
        try {
            if (request.getReadType() != ReadType.JAI_IMAGEREAD && inStream != null) {
                inStream.close();
            }
        } finally {
            if (request.getReadType() != ReadType.JAI_IMAGEREAD && reader != null) {
                reader.dispose();
            }
        }
    }
}
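// The loadRaster(...) method above composes the final raster-to-mosaic transform as
// mosaicWorldToGrid * baseGridToWorld * centerToCorner * baseToLevel * translate(sourceArea.x, y)
// * decimationScale. A minimal numeric sketch of that composition order with plain java.awt.geom;
// all numbers are made up, and GeoTools' XAffineTransform helpers are replaced by the equivalent
// AffineTransform factory calls:
public static AffineTransform composeGranuleTransformExample() {
    AffineTransform baseGridToWorld = new AffineTransform(10, 0, 0, -10, 500000, 4000000);
    AffineTransform mosaicWorldToGrid = new AffineTransform(0.1, 0, 0, -0.1, -50000, 400000);

    AffineTransform finalRaster2Model = new AffineTransform(baseGridToWorld);
    finalRaster2Model.concatenate(AffineTransform.getTranslateInstance(-0.5, -0.5)); // center -> corner
    finalRaster2Model.concatenate(AffineTransform.getScaleInstance(2, 2));           // base -> level 1 overview
    finalRaster2Model.concatenate(AffineTransform.getTranslateInstance(128, 64));    // crop offset in level space
    finalRaster2Model.concatenate(AffineTransform.getScaleInstance(1.5, 1.5));       // decimation ratio

    // finally move from world coordinates into the mosaic's grid space
    finalRaster2Model.preConcatenate(mosaicWorldToGrid);
    return finalRaster2Model;
}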
private void init(
        final BoundingBox granuleBBOX,
        final URL granuleUrl,
        final ImageReaderSpi suggestedSPI,
        final Geometry inclusionGeometry,
        final boolean heterogeneousGranules,
        final boolean handleArtifactsFiltering) {
    this.granuleBBOX = ReferencedEnvelope.reference(granuleBBOX);
    this.granuleUrl = granuleUrl;
    this.inclusionGeometry = inclusionGeometry;
    this.handleArtifactsFiltering = handleArtifactsFiltering;
    filterMe = handleArtifactsFiltering && inclusionGeometry != null;

    // create the base grid to world transformation
    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
        //
        // get info about the raster we have to read
        //

        // get a stream
        if (cachedStreamSPI == null) {
            cachedStreamSPI = ImageIOExt.getImageInputStreamSPI(granuleUrl, true);
            if (cachedStreamSPI == null) {
                final File file = DataUtilities.urlToFile(granuleUrl);
                if (file != null) {
                    if (LOGGER.isLoggable(Level.WARNING)) {
                        LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
                    }
                }
                throw new IllegalArgumentException(
                        "Unable to get an input stream for the provided granule "
                                + granuleUrl.toString());
            }
        }
        assert cachedStreamSPI != null : "no cachedStreamSPI available!";
        inStream =
                cachedStreamSPI.createInputStreamInstance(
                        granuleUrl, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
        if (inStream == null) {
            final File file = DataUtilities.urlToFile(granuleUrl);
            if (file != null) {
                if (LOGGER.isLoggable(Level.WARNING)) {
                    LOGGER.log(Level.WARNING, Utils.getFileInfo(file));
                }
            }
            throw new IllegalArgumentException(
                    "Unable to get an input stream for the provided file "
                            + granuleUrl.toString());
        }

        // get a reader and try to cache the suggested SPI first
        if (cachedReaderSPI == null) {
            inStream.mark();
            if (suggestedSPI != null && suggestedSPI.canDecodeInput(inStream)) {
                cachedReaderSPI = suggestedSPI;
                inStream.reset();
            } else {
                inStream.mark();
                reader = ImageIOExt.getImageioReader(inStream);
                if (reader != null) cachedReaderSPI = reader.getOriginatingProvider();
                inStream.reset();
            }
        }
        reader = cachedReaderSPI.createReaderInstance();
        if (reader == null)
            throw new IllegalArgumentException(
                    "Unable to get an ImageReader for the provided file "
                            + granuleUrl.toString());
        reader.setInput(inStream);

        // get selected level and base level dimensions
        final Rectangle originalDimension = Utils.getDimension(0, reader);

        // build the g2W for this tile, in principle we should get it
        // somehow from the tile itself or from the index, but at the moment
        // we do not have such info, hence we assume that it is a simple
        // scale and translate
        final GridToEnvelopeMapper geMapper =
                new GridToEnvelopeMapper(new GridEnvelope2D(originalDimension), granuleBBOX);
        // this is the default behavior but it is nice to write it down anyway
        geMapper.setPixelAnchor(PixelInCell.CELL_CENTER);
        this.baseGridToWorld = geMapper.createAffineTransform();

        try {
            if (inclusionGeometry != null) {
                geMapper.setPixelAnchor(PixelInCell.CELL_CORNER);
                Geometry mapped =
                        JTS.transform(inclusionGeometry, geMapper.createTransform().inverse());
                this.granuleROIShape = new ROIGeometry(mapped);
            }
        } catch (TransformException e1) {
            throw new IllegalArgumentException(e1);
        }

        // add the base level
        this.granuleLevels.put(
                Integer.valueOf(0),
                new GranuleOverviewLevelDescriptor(
                        1, 1, originalDimension.width, originalDimension.height));

        ////////////////////// Setting overviewController ///////////////////////
        if (heterogeneousGranules) {
            //
            // Right now we are setting up overviewsController by assuming that
            // overviews are internal images as happens in TIFF images.
            // We can improve this by leveraging on coverageReaders.
            //

            // Getting the first level descriptor
            final GranuleOverviewLevelDescriptor baseOverviewLevelDescriptor = granuleLevels.get(0);

            // Variables initialization
            final int numberOfOverviews = reader.getNumImages(true) - 1;
            final AffineTransform2D baseG2W = baseOverviewLevelDescriptor.getGridToWorldTransform();
            final int width = baseOverviewLevelDescriptor.getWidth();
            final int height = baseOverviewLevelDescriptor.getHeight();
            final double resX = AffineTransform2D.getScaleX0(baseG2W);
            final double resY = AffineTransform2D.getScaleY0(baseG2W);
            final double[] highestRes = new double[] {resX, resY};
            final double[][] overviewsResolution = new double[numberOfOverviews][2];

            // Populating overviews and initializing overviewsController
            for (int i = 0; i < numberOfOverviews; i++) {
                overviewsResolution[i][0] = (highestRes[0] * width) / reader.getWidth(i + 1);
                overviewsResolution[i][1] = (highestRes[1] * height) / reader.getHeight(i + 1);
            }
            overviewsController =
                    new OverviewsController(highestRes, numberOfOverviews, overviewsResolution);
        }
        //////////////////////////////////////////////////////////////////////////

    } catch (IllegalStateException e) {
        throw new IllegalArgumentException(e);
    } catch (IOException e) {
        throw new IllegalArgumentException(e);
    } finally {
        // close/dispose stream and readers
        try {
            if (inStream != null) {
                inStream.close();
            }
        } catch (Throwable e) {
            throw new IllegalArgumentException(e);
        } finally {
            if (reader != null) {
                reader.dispose();
            }
        }
    }
}
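// In the heterogeneous-granules branch above, the resolution of each internal overview is derived
// from the base level as: overviewRes = (baseRes * baseSize) / overviewSize, per axis. A tiny
// standalone sketch of that computation (the 10m base resolution and the image sizes are invented
// values):
public static double[][] overviewResolutionsExample() {
    double[] highestRes = {10.0, 10.0}; // units per pixel at the base level
    int baseWidth = 4096, baseHeight = 4096;
    int[][] overviewSizes = {{2048, 2048}, {1024, 1024}, {512, 512}};

    double[][] resolutions = new double[overviewSizes.length][2];
    for (int i = 0; i < overviewSizes.length; i++) {
        resolutions[i][0] = (highestRes[0] * baseWidth) / overviewSizes[i][0];
        resolutions[i][1] = (highestRes[1] * baseHeight) / overviewSizes[i][1];
    }
    return resolutions; // {20,20}, {40,40}, {80,80}
}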
public static void doHacks() {
    if (skipHack("org.apache.cxf.JDKBugHacks.all")) {
        return;
    }
    try {
        // Use the system classloader as the victim for all this
        // ClassLoader pinning we're about to do.
        ClassLoaderHolder orig =
                ClassLoaderUtils.setThreadContextClassloader(ClassLoader.getSystemClassLoader());
        try {
            try {
                // Trigger a call to sun.awt.AppContext.getAppContext()
                if (!skipHack("org.apache.cxf.JDKBugHacks.imageIO")) {
                    ImageIO.getCacheDirectory();
                }
            } catch (Throwable t) {
                // ignore
            }
            try {
                // DocumentBuilderFactory seems to SOMETIMES pin the classloader
                if (!skipHack("org.apache.cxf.JDKBugHacks.documentBuilderFactory")) {
                    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
                    factory.newDocumentBuilder();
                }
            } catch (Throwable e) {
                // ignore
            }
            // Several components end up calling:
            // sun.misc.GC.requestLatency(long)
            //
            // Those libraries / components known to trigger memory leaks due to
            // eventual calls to requestLatency(long) are:
            // - javax.management.remote.rmi.RMIConnectorServer.start()
            try {
                if (!skipHack("org.apache.cxf.JDKBugHacks.gcRequestLatency")) {
                    Class<?> clazz = Class.forName("sun.misc.GC");
                    Method method = clazz.getDeclaredMethod("currentLatencyTarget");
                    Long l = (Long) method.invoke(null);
                    if (l != null && l.longValue() == 0) {
                        // something already set it, move on
                        method = clazz.getDeclaredMethod("requestLatency", new Class[] {Long.TYPE});
                        method.invoke(null, Long.valueOf(36000000));
                    }
                }
            } catch (Throwable e) {
                // ignore
            }
            // Calling getPolicy retains a static reference to the context
            // class loader.
            try {
                // Policy.getPolicy();
                if (!skipHack("org.apache.cxf.JDKBugHacks.policy")) {
                    Class<?> policyClass = Class.forName("javax.security.auth.Policy");
                    Method method = policyClass.getMethod("getPolicy");
                    method.invoke(null);
                }
            } catch (Throwable e) {
                // ignore
            }
            try {
                // Initializing javax.security.auth.login.Configuration retains a static reference
                // to the context class loader.
                if (!skipHack("org.apache.cxf.JDKBugHacks.authConfiguration")) {
                    Class.forName(
                            "javax.security.auth.login.Configuration",
                            true,
                            ClassLoader.getSystemClassLoader());
                }
            } catch (Throwable e) {
                // Ignore
            }
            // Creating a MessageDigest during web application startup
            // initializes the Java Cryptography Architecture. Under certain
            // conditions this starts a Token poller thread with TCCL equal
            // to the web application class loader.
            if (!skipHack("org.apache.cxf.JDKBugHacks.securityProviders")) {
                java.security.Security.getProviders();
            }
            try {
                // Several components end up opening JarURLConnections without first
                // disabling caching. This effectively locks the file.
                // JAXB does this and thus affects us pretty badly.
                // Doesn't matter that this JAR doesn't exist - just as long as
                // the URL is well-formed
                if (!skipHack("org.apache.cxf.JDKBugHacks.defaultUsesCaches")) {
                    URL url = new URL("jar:file://dummy.jar!/");
                    URLConnection uConn =
                            new URLConnection(url) {
                                @Override
                                public void connect() throws IOException {
                                    // NOOP
                                }
                            };
                    uConn.setDefaultUseCaches(false);
                }
            } catch (Throwable e) {
                // ignore
            }
        } finally {
            if (orig != null) {
                orig.reset();
            }
        }
    } catch (Throwable t) {
        // ignore
    }
}
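// The hacks above can be disabled individually or wholesale through the system properties read by
// skipHack(...). A hedged usage sketch: call doHacks() once at startup, optionally after opting
// out of specific workarounds. The convention that setting the property to "true" skips a hack is
// an assumption inferred from the skipHack(...) key names above, not verified against CXF docs.
public static void main(String[] args) {
    // keep JAR URL connection caching enabled, skip only that workaround
    System.setProperty("org.apache.cxf.JDKBugHacks.defaultUsesCaches", "true");
    JDKBugHacks.doHacks();
}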