/**
  * Invoked when a factory is requested for a specific version. This method should create a factory
  * for the exact version specified by the argument, or return {@code null} if no such factory is
  * available. In the latter case, this class will fall back to the factory specified at {@linkplain
  * #URN_AuthorityFactory(AuthorityFactory, String, Citation) construction time}.
  *
  * @param version The version for the factory to create.
  * @return The factory, or {@code null} if there is none for the specified version.
  * @throws FactoryException if an error occurred while creating the factory.
  */
 protected AuthorityFactory createVersionedFactory(final Version version) throws FactoryException {
   final Hints hints = new Hints(factory.getImplementationHints());
   hints.put(Hints.VERSION, version);
   final List<AuthorityFactory> factories =
       Arrays.asList(new AuthorityFactory[] {new AllAuthoritiesFactory(hints), factory});
   return FallbackAuthorityFactory.create(factories);
 }
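A hedged caller-side sketch (the URN and version below are illustrative, not from the original source): a version embedded in an OGC URN is what ultimately drives this lookup, and a null return from createVersionedFactory means the construction-time factory handles the code instead.

  // Decoding a versioned OGC URN routes through the URN authority factory, which
  // asks createVersionedFactory for a factory matching version 6.11.2.
  CoordinateReferenceSystem crs = CRS.decode("urn:ogc:def:crs:EPSG:6.11.2:4326");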
Example #2
  @Override
  public void encodeGeometryColumn(
      GeometryDescriptor gatt, String prefix, int srid, Hints hints, StringBuffer sql) {

    boolean geography =
        "geography".equals(gatt.getUserData().get(JDBCDataStore.JDBC_NATIVE_TYPENAME));

    if (geography) {
      sql.append("encode(ST_AsBinary(");
      encodeColumnName(prefix, gatt.getLocalName(), sql);
      sql.append("),'base64')");
    } else {
      boolean force2D =
          hints != null
              && hints.containsKey(Hints.FEATURE_2D)
              && Boolean.TRUE.equals(hints.get(Hints.FEATURE_2D));

      if (force2D) {
        sql.append("encode(ST_AsBinary(ST_Force_2D(");
        encodeColumnName(prefix, gatt.getLocalName(), sql);
        sql.append(")),'base64')");
      } else {
        sql.append("encode(ST_AsEWKB(");
        encodeColumnName(prefix, gatt.getLocalName(), sql);
        sql.append("),'base64')");
      }
    }
  }
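A hedged usage sketch of the hint that selects the ST_Force_2D branch above; the dialect instance, geometry descriptor and resulting column name are hypothetical stand-ins.

  // Force 2D output through the FEATURE_2D hint (hypothetical dialect and descriptor).
  Hints hints = new Hints(Hints.FEATURE_2D, Boolean.TRUE);
  StringBuffer sql = new StringBuffer();
  dialect.encodeGeometryColumn(gatt, null, 4326, hints, sql);
  // sql now holds something like: encode(ST_AsBinary(ST_Force_2D("geom")),'base64')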
Example #3
  static {
    // getting default hints
    final Hints defaultHints = GeoTools.getDefaultHints();

    // check if someone asked us to use a specific precision model
    final Object o = defaultHints.get(Hints.JTS_PRECISION_MODEL);
    final PrecisionModel pm;
    if (o != null) pm = (PrecisionModel) o;
    else {
      pm = new PrecisionModel();
    }
    GFACTORY = new GeometryFactory(pm, 0);

    // Register the GTCrop operation manually; in web containers JAI registration may fail
    GTCropDescriptor.register();
  }
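A short sketch of how a caller could steer the precision model read by the static block above; the system default has to be installed before this class is first loaded, and the fixed precision model is just an assumed example.

  // Ask for a fixed JTS precision model through the default hints consumed above.
  Hints.putSystemDefault(Hints.JTS_PRECISION_MODEL, new PrecisionModel(PrecisionModel.FIXED));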
  /**
   * Creates a new instance of an {@link AbstractGridCoverage2DReader}. No assumption is made
   * about the file extension.
   *
   * @param input Source object for which we want to build a reader.
   * @param hints Hints to be used by this reader throughout its lifetime.
   * @throws DataSourceException if the provided input is invalid (for instance {@code null}).
   */
  public AbstractGridCoverage2DReader(Object input, Hints hints) throws DataSourceException {

    //
    // basic management of hints
    //
    if (hints == null) {
      this.hints = new Hints();
    } else {
      this.hints = hints.clone();
    }

    // GridCoverageFactory initialization
    if (this.hints.containsKey(Hints.GRID_COVERAGE_FACTORY)) {
      final Object factory = this.hints.get(Hints.GRID_COVERAGE_FACTORY);
      if (factory instanceof GridCoverageFactory) {
        this.coverageFactory = (GridCoverageFactory) factory;
      }
    }
    if (this.coverageFactory == null) {
      this.coverageFactory = CoverageFactoryFinder.getGridCoverageFactory(this.hints);
    }

    //
    // Setting input
    //
    if (input == null) {
      final IOException ex = new IOException(Errors.format(ErrorKeys.NULL_ARGUMENT_$1, "input"));
      throw new DataSourceException(ex);
    }
    this.source = input;
  }
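A hedged sketch of supplying the coverage factory through hints so the constructor above skips the CoverageFactoryFinder lookup; the GeoTIFF reader and file name are illustrative assumptions.

  // Pre-select the GridCoverageFactory via hints (hypothetical input file).
  Hints hints = new Hints(Hints.GRID_COVERAGE_FACTORY, new GridCoverageFactory());
  AbstractGridCoverage2DReader reader = new GeoTiffReader(new File("coverage.tif"), hints);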
Example #5
  @Test
  public void testGoogleWorld() throws Exception {
    File world = TestData.copy(this, "geotiff/world.tiff");
    RenderedImage image = ImageIO.read(world);

    final CoordinateReferenceSystem wgs84 = CRS.decode("EPSG:4326", true);
    Envelope2D envelope = new Envelope2D(wgs84, -180, -90, 360, 180);
    GridCoverage2D gcFullWorld = new GridCoverageFactory().create("world", image, envelope);

    // crop, we cannot reproject it fully to the google projection
    final Envelope2D cropEnvelope = new Envelope2D(wgs84, -180, -80, 360, 160);
    GridCoverage2D gcCropWorld =
        (GridCoverage2D) Operations.DEFAULT.crop(gcFullWorld, cropEnvelope);

    // resample
    Hints.putSystemDefault(Hints.RESAMPLE_TOLERANCE, 0d);
    GridCoverage2D gcResampled =
        (GridCoverage2D)
            Operations.DEFAULT.resample(
                gcCropWorld,
                CRS.decode("EPSG:3857"),
                null,
                Interpolation.getInstance(Interpolation.INTERP_BILINEAR));

    File expected =
        new File("src/test/resources/org/geotools/image/test-data/google-reproject.png");
    // allow one row of difference
    ImageAssert.assertEquals(expected, gcResampled.getRenderedImage(), 600);
  }
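Since the test above installs a system-wide RESAMPLE_TOLERANCE, a hedged clean-up sketch (a hypothetical tear-down, not part of the original test) would restore the default afterwards:

  @After
  public void resetResampleTolerance() {
    // remove the system default so other tests see the stock tolerance again
    Hints.removeSystemDefault(Hints.RESAMPLE_TOLERANCE);
  }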
Example #6
 /** Set up common objects used for all tests. */
 @Before
 public void setUp() {
   coverage = EXAMPLES.get(0);
   indexedCoverage = EXAMPLES.get(2);
   indexedCoverageWithTransparency = EXAMPLES.get(3);
   floatCoverage = EXAMPLES.get(4);
   ushortCoverage = EXAMPLES.get(5);
   Hints.putSystemDefault(Hints.RESAMPLE_TOLERANCE, 0.333);
 }
Example #7
  /**
   * Tests the "Resample" operation on a paletted image, reprojecting to the Google Mercator CRS.
   *
   * @throws FactoryException if the target CRS cannot be created.
   * @throws NoSuchAuthorityCodeException if an authority code is unknown.
   */
  @Test
  public void testReprojectPalette() throws NoSuchAuthorityCodeException, FactoryException {

    // make sure the image does not turn black during reprojection
    GridCoverage2D input = ushortCoverage;
    // Create a Palette image from the input coverage
    RenderedImage src = input.getRenderedImage();
    ImageWorker iw = new ImageWorker(src).rescaleToBytes().forceIndexColorModel(false);
    src = iw.getRenderedOperation();

    // Setting Force ReplaceIndexColorModel and CoverageProcessingView as SAME
    Hints hints = GeoTools.getDefaultHints().clone();
    hints.put(JAI.KEY_REPLACE_INDEX_COLOR_MODEL, true);
    hints.put(Hints.COVERAGE_PROCESSING_VIEW, ViewType.SAME);

    // Create a new GridCoverage
    GridCoverageFactory factory = new GridCoverageFactory(hints);
    GridCoverage2D palette = factory.create("test", src, input.getEnvelope());

    CoordinateReferenceSystem targetCRS = CRS.parseWKT(GOOGLE_MERCATOR_WKT);
    GridCoverage2D coverage_ = project(palette, targetCRS, null, "bilinear", hints, true);

    // reproject the ushort coverage and check that things did not go bad, that is, that the image did not turn black
    coverage_ = (GridCoverage2D) Operations.DEFAULT.extrema(coverage_);
    Object minimums = coverage_.getProperty(Extrema.GT_SYNTHETIC_PROPERTY_MINIMUM);
    Assert.assertTrue(minimums instanceof double[]);
    final double[] mins = (double[]) minimums;
    Object maximums = coverage_.getProperty(Extrema.GT_SYNTHETIC_PROPERTY_MAXIMUM);
    Assert.assertTrue(maximums instanceof double[]);
    final double[] max = (double[]) maximums;
    boolean fail = true;
    for (int i = 0; i < mins.length; i++) if (mins[i] != max[i] && max[i] > 0) fail = false;
    Assert.assertFalse("Reprojection failed", fail);

    // Ensure the CRS is correct
    CoordinateReferenceSystem targetCoverageCRS = coverage_.getCoordinateReferenceSystem();
    Assert.assertTrue(CRS.equalsIgnoreMetadata(targetCRS, targetCoverageCRS));
  }
Example #8
 /** Constructs an authority factory using the specified hints and priority. */
 protected FactoryUsingWKT(final Hints userHints, final int priority) {
   super(userHints, priority);
   factories = ReferencingFactoryContainer.instance(userHints);
   Object hint = null;
   if (userHints != null) {
     hint = userHints.get(Hints.CRS_AUTHORITY_EXTRA_DIRECTORY);
   }
   if (hint instanceof File) {
     directory = (File) hint;
   } else if (hint instanceof String) {
     directory = new File((String) hint);
   } else {
     directory = null;
   }
   hints.put(Hints.CRS_AUTHORITY_EXTRA_DIRECTORY, directory);
   // Disposes the cached property file after at least 15 minutes of inactivity.
   setTimeout(15 * 60 * 1000L);
 }
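A hedged usage sketch of the hint consumed by this constructor; the directory path is hypothetical and the public single-argument constructor is assumed to apply a default priority.

  // Point the WKT-backed factory at an extra directory of EPSG property files.
  Hints hints = new Hints(Hints.CRS_AUTHORITY_EXTRA_DIRECTORY, new File("/data/epsg_overrides"));
  FactoryUsingWKT factory = new FactoryUsingWKT(hints);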
Example #9
 /**
  * Tests the "Resample" operation with a stereographic coordinate system.
  *
  * @throws FactoryException If the CRS can't be created.
  */
 @Test
 public void testsNad83() throws FactoryException {
   Hints.putSystemDefault(Hints.RESAMPLE_TOLERANCE, 0.0);
   final Hints photo = new Hints(Hints.COVERAGE_PROCESSING_VIEW, ViewType.PHOTOGRAPHIC);
   final CoordinateReferenceSystem crs =
       CRS.parseWKT(
           "GEOGCS[\"NAD83\","
               + "DATUM[\"North_American_Datum_1983\","
               + "SPHEROID[\"GRS 1980\",6378137,298.257222101,AUTHORITY[\"EPSG\",\"7019\"]],"
               + "TOWGS84[0,0,0,0,0,0,0],AUTHORITY[\"EPSG\",\"6269\"]],"
               + "PRIMEM[\"Greenwich\",0, AUTHORITY[\"EPSG\",\"8901\"]],"
               + "UNIT[\"degree\",0.0174532925199433,AUTHORITY[\"EPSG\",\"9108\"]],"
               + "AXIS[\"Lat\",NORTH],"
               + "AXIS[\"Long\",EAST],"
               + "AUTHORITY[\"EPSG\",\"4269\"]]");
   assertEquals("Warp", showProjected(indexedCoverage, crs, null, null, false));
   assertEquals("Warp", showProjected(indexedCoverageWithTransparency, crs, null, null, false));
   assertEquals("Warp", showProjected(floatCoverage, crs, null, photo, true));
 }
Example #10
 static {
   hints.add(LENIENT_HINT);
 }
 @After
 public void tearDown() {
   Hints.removeSystemDefault(Hints.FORCE_LONGITUDE_FIRST_AXIS_ORDER);
 }
 Object convertSafe(Object source, Class<?> target) throws Exception {
   Hints hints = new Hints();
    hints.put(ConverterFactory.SAFE_CONVERSION, Boolean.TRUE);
   return factory.createConverter(source.getClass(), target, hints).convert(source, target);
 }
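The same SAFE_CONVERSION hint used by convertSafe above can be passed through the static Converters facade; a hedged sketch with illustrative values:

  // Safe conversion restricts lookup to lossless converters; unsafe requests yield null.
  Hints safe = new Hints(ConverterFactory.SAFE_CONVERSION, Boolean.TRUE);
  Double value = Converters.convert("12.5", Double.class, safe);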
 protected void setUp() throws Exception {
   Hints.putSystemDefault(Hints.FORCE_LONGITUDE_FIRST_AXIS_ORDER, true);
 }
  public AbstractMappingFeatureIterator(
      AppSchemaDataAccess store,
      FeatureTypeMapping mapping,
      Query query,
      Query unrolledQuery,
      boolean removeQueryLimitIfDenormalised,
      boolean hasPostFilter)
      throws IOException {
    this.store = store;
    this.attf = new AppSchemaFeatureFactoryImpl();

    this.mapping = mapping;

    // validate and initialise resolve options
    Hints hints = query.getHints();
    ResolveValueType resolveVal = (ResolveValueType) hints.get(Hints.RESOLVE);
    boolean resolve =
        ResolveValueType.ALL.equals(resolveVal) || ResolveValueType.LOCAL.equals(resolveVal);
    if (!resolve && resolveVal != null && !ResolveValueType.NONE.equals(resolveVal)) {
      throw new IllegalArgumentException(
          "Resolve:" + resolveVal.getName() + " is not supported in app-schema!");
    }
    Integer atd = (Integer) hints.get(Hints.ASSOCIATION_TRAVERSAL_DEPTH);
    resolveDepth = resolve && atd != null ? atd : 0;
    resolveTimeOut = (Integer) hints.get(Hints.RESOLVE_TIMEOUT);

    namespaces = mapping.getNamespaces();
    namespaceAwareFilterFactory = new FilterFactoryImplNamespaceAware(namespaces);

    Object includeProps = query.getHints().get(Query.INCLUDE_MANDATORY_PROPS);
    includeMandatory = includeProps instanceof Boolean && ((Boolean) includeProps).booleanValue();

    if (mapping.isDenormalised()) {
      // we need to disable the max number of features retrieved so we can
      // sort them manually just in case the data is denormalised.  Do this
      // by overriding the max features for this query just before executing
      // it.  Note that the original maxFeatures value was copied to
      // this.requestMaxFeatures in the constructor and will be re-applied after
      // the rows have been returned
      if (removeQueryLimitIfDenormalised) {
        this.dataMaxFeatures = 1000000;
        if (hasPostFilter) {
          // true max features will be handled in PostFilteringMappingFeatureIterator
          this.requestMaxFeatures = 1000000;
        } else {
          this.requestMaxFeatures = query.getMaxFeatures();
        }
      } else {
        this.dataMaxFeatures = query.getMaxFeatures();
        this.requestMaxFeatures = query.getMaxFeatures();
      }
    } else {
      this.requestMaxFeatures = query.getMaxFeatures();
      this.dataMaxFeatures = query.getMaxFeatures();
    }

    if (unrolledQuery == null) {
      unrolledQuery = getUnrolledQuery(query);
      if (query instanceof JoiningQuery && unrolledQuery instanceof JoiningQuery) {
        ((JoiningQuery) unrolledQuery).setRootMapping(((JoiningQuery) query).getRootMapping());
      }
    }

    // NC - property names
    if (query != null && query.getProperties() != null) {
      setPropertyNames(query.getProperties());
    } else {
      // we need the actual property names (not surrogates) to do this...
      setPropertyNames(null);
    }
    xpathAttributeBuilder = new XPath();
    xpathAttributeBuilder.setFeatureFactory(attf);
    initialiseSourceFeatures(mapping, unrolledQuery, query.getCoordinateSystemReproject());
    xpathAttributeBuilder.setFilterFactory(namespaceAwareFilterFactory);
  }
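A hedged sketch of the query-side hints this constructor inspects; the feature type name, depth and timeout values are illustrative.

  // Configure local xlink resolution on the incoming query (illustrative values).
  Query query = new Query("gsml:MappedFeature");
  query.getHints().put(Hints.RESOLVE, ResolveValueType.LOCAL);
  query.getHints().put(Hints.ASSOCIATION_TRAVERSAL_DEPTH, 2);
  query.getHints().put(Hints.RESOLVE_TIMEOUT, 60);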
Example #15
  /**
   * Tests the creation of new coordinate reference systems.
   *
   * @throws FactoryException if a coordinate reference system can't be created.
   */
  @Test
  public void testCreation() throws FactoryException {
    out.println();
    out.println("Testing CRS creations");
    out.println("---------------------");
    out.println();
    out.println("create Coodinate Reference System....1: ");
    final DatumFactory datumFactory = ReferencingFactoryFinder.getDatumFactory(null);
    final CSFactory csFactory = ReferencingFactoryFinder.getCSFactory(null);
    final CRSFactory crsFactory = ReferencingFactoryFinder.getCRSFactory(null);
    final MathTransformFactory mtFactory = ReferencingFactoryFinder.getMathTransformFactory(null);

    final Ellipsoid airy1830;
    final Unit<Length> meters = SI.METER;
    airy1830 = datumFactory.createEllipsoid(name("Airy1830"), 6377563.396, 6356256.910, meters);
    out.println();
    out.println("create Coodinate Reference System....2: ");
    out.println(airy1830.toWKT());

    final PrimeMeridian greenwich;
    final Unit<Angle> degrees = NonSI.DEGREE_ANGLE;
    greenwich = datumFactory.createPrimeMeridian(name("Greenwich"), 0, degrees);
    out.println();
    out.println("create Coodinate Reference System....3: ");
    out.println(greenwich.toWKT());

    // NOTE: we could use the following pre-defined constant instead:
    //       DefaultPrimeMeridian.GREENWICH;
    final GeodeticDatum datum;
    datum = datumFactory.createGeodeticDatum(name("Airy1830"), airy1830, greenwich);
    out.println();
    out.println("create Coodinate Reference System....4: ");
    out.println(datum.toWKT());

    // NOTE: we could use the following pre-defined constant instead:
    //       DefaultEllipsoidalCS.GEODETIC_2D;
    final EllipsoidalCS ellCS;
    ellCS =
        csFactory.createEllipsoidalCS(
            name("Ellipsoidal"),
            csFactory.createCoordinateSystemAxis(
                name("Longitude"), "long", AxisDirection.EAST, degrees),
            csFactory.createCoordinateSystemAxis(
                name("Latitude"), "lat", AxisDirection.NORTH, degrees));
    out.println();
    out.println("create Coodinate Reference System....5: ");
    out.println(ellCS); // No WKT for coordinate systems

    final GeographicCRS geogCRS;
    geogCRS = crsFactory.createGeographicCRS(name("Airy1830"), datum, ellCS);
    out.println();
    out.println("create Coodinate Reference System....6: ");
    out.println(geogCRS.toWKT());

    final MathTransform p;
    final ParameterValueGroup param = mtFactory.getDefaultParameters("Transverse_Mercator");
    param.parameter("semi_major").setValue(airy1830.getSemiMajorAxis());
    param.parameter("semi_minor").setValue(airy1830.getSemiMinorAxis());
    param.parameter("central_meridian").setValue(49);
    param.parameter("latitude_of_origin").setValue(-2);
    param.parameter("false_easting").setValue(400000);
    param.parameter("false_northing").setValue(-100000);
    out.println();
    out.println("create Coodinate System....7: ");
    out.println(param);

    // NOTE: we could use the following pre-defined constant instead:
    //       DefaultCartesianCS.PROJECTED;
    final CartesianCS cartCS;
    cartCS =
        csFactory.createCartesianCS(
            name("Cartesian"),
            csFactory.createCoordinateSystemAxis(name("Easting"), "x", AxisDirection.EAST, meters),
            csFactory.createCoordinateSystemAxis(
                name("Northing"), "y", AxisDirection.NORTH, meters));
    out.println();
    out.println("create Coodinate Reference System....8: ");
    out.println(cartCS); // No WKT for coordinate systems

    final Hints hints = new Hints();
    hints.put(Hints.DATUM_FACTORY, datumFactory);
    hints.put(Hints.CS_FACTORY, csFactory);
    hints.put(Hints.CRS_FACTORY, crsFactory);
    hints.put(Hints.MATH_TRANSFORM_FACTORY, mtFactory);

    final ReferencingFactoryContainer container = new ReferencingFactoryContainer(hints);
    assertSame(datumFactory, container.getDatumFactory());
    assertSame(csFactory, container.getCSFactory());
    assertSame(crsFactory, container.getCRSFactory());
    assertSame(mtFactory, container.getMathTransformFactory());

    final Conversion conversion = new DefiningConversion("GBN grid", param);
    final ProjectedCRS projCRS =
        crsFactory.createProjectedCRS(
            name("Great_Britian_National_Grid"), geogCRS, conversion, cartCS);
    out.println();
    out.println("create Coodinate System....9: ");
    out.println(projCRS.toWKT());
  }
Example #16
  @Test
  public void NetCDFTestAscatL1()
      throws NoSuchAuthorityCodeException, FactoryException, IOException, ParseException {
    File mosaic = new File(TestData.file(this, "."), "NetCDFTestAscatL1");
    if (mosaic.exists()) {
      FileUtils.deleteDirectory(mosaic);
    }
    assertTrue(mosaic.mkdirs());
    File file = TestData.file(this, "ascatl1.nc");
    FileUtils.copyFileToDirectory(file, mosaic);
    file = new File(mosaic, "ascatl1.nc");

    final Hints hints =
        new Hints(Hints.DEFAULT_COORDINATE_REFERENCE_SYSTEM, CRS.decode("EPSG:4326", true));
    hints.add(new Hints(Utils.EXCLUDE_MOSAIC, true));

    // Get format
    final AbstractGridFormat format =
        (AbstractGridFormat) GridFormatFinder.findFormat(file.toURI().toURL(), hints);
    final NetCDFReader reader = (NetCDFReader) format.getReader(file.toURI().toURL(), hints);

    assertNotNull(format);
    try {
      String[] names = reader.getGridCoverageNames();
      names = new String[] {names[1]};

      for (String coverageName : names) {

        final String[] metadataNames = reader.getMetadataNames(coverageName);
        assertNotNull(metadataNames);
        assertEquals(17, metadataNames.length);

        // Parsing metadata values
        assertEquals("false", reader.getMetadataValue(coverageName, "HAS_TIME_DOMAIN"));

        assertEquals("false", reader.getMetadataValue(coverageName, "HAS_ELEVATION_DOMAIN"));

        assertEquals("true", reader.getMetadataValue(coverageName, "HAS_NUMSIGMA_DOMAIN"));
        final String sigmaMetadata = reader.getMetadataValue(coverageName, "NUMSIGMA_DOMAIN");
        assertNotNull(sigmaMetadata);
        assertEquals("0,1,2", sigmaMetadata);
        assertEquals(3, sigmaMetadata.split(",").length);

        // subsetting the envelope
        final ParameterValue<GridGeometry2D> gg =
            AbstractGridFormat.READ_GRIDGEOMETRY2D.createValue();
        final GeneralEnvelope originalEnvelope = reader.getOriginalEnvelope(coverageName);
        final GeneralEnvelope reducedEnvelope =
            new GeneralEnvelope(
                new double[] {
                  originalEnvelope.getLowerCorner().getOrdinate(0),
                  originalEnvelope.getLowerCorner().getOrdinate(1)
                },
                new double[] {
                  originalEnvelope.getMedian().getOrdinate(0),
                  originalEnvelope.getMedian().getOrdinate(1)
                });
        reducedEnvelope.setCoordinateReferenceSystem(
            reader.getCoordinateReferenceSystem(coverageName));

        // Selecting bigger gridRange for a zoomed result
        final Dimension dim = new Dimension();
        GridEnvelope gridRange = reader.getOriginalGridRange(coverageName);
        dim.setSize(gridRange.getSpan(0) * 4.0, gridRange.getSpan(1) * 2.0);
        final Rectangle rasterArea = ((GridEnvelope2D) gridRange);
        rasterArea.setSize(dim);
        final GridEnvelope2D range = new GridEnvelope2D(rasterArea);
        gg.setValue(new GridGeometry2D(range, reducedEnvelope));

        ParameterValue<List<String>> sigmaValue = null;
        final String selectedSigma = "1";
        Set<ParameterDescriptor<List>> params = reader.getDynamicParameters(coverageName);
        for (ParameterDescriptor param : params) {
          if (param.getName().getCode().equalsIgnoreCase("NUMSIGMA")) {
            sigmaValue = param.createValue();
            sigmaValue.setValue(
                new ArrayList<String>() {
                  {
                    add(selectedSigma);
                  }
                });
          }
        }

        GeneralParameterValue[] values = new GeneralParameterValue[] {gg, sigmaValue};
        GridCoverage2D coverage = reader.read(coverageName, values);
        assertNotNull(coverage);
        if (TestData.isInteractiveTest()) {
          coverage.show();
        } else {
          PlanarImage.wrapRenderedImage(coverage.getRenderedImage()).getTiles();
        }
      }
    } catch (Throwable t) {
      throw new RuntimeException(t);
    } finally {
      if (reader != null) {
        try {
          reader.dispose();
        } catch (Throwable t) {
          // Does nothing
        }
      }
    }
  }
Example #17
  /**
   * This method reads in the TIFF image, constructs an appropriate CRS, determines the math
   * transform from raster to the CRS model, and constructs a GridCoverage.
   *
   * @param params read parameters (requested grid geometry, overview policy, input transparent
   *     color, suggested tile size).
   * @return grid coverage represented by the image
   * @throws IOException on any IO related troubles
   */
  public GridCoverage2D read(GeneralParameterValue[] params) throws IOException {
    GeneralEnvelope requestedEnvelope = null;
    Rectangle dim = null;
    Color inputTransparentColor = null;
    OverviewPolicy overviewPolicy = null;
    int[] suggestedTileSize = null;
    if (params != null) {

      //
      // Checking params
      //
      if (params != null) {
        for (int i = 0; i < params.length; i++) {
          final ParameterValue param = (ParameterValue) params[i];
          final ReferenceIdentifier name = param.getDescriptor().getName();
          if (name.equals(AbstractGridFormat.READ_GRIDGEOMETRY2D.getName())) {
            final GridGeometry2D gg = (GridGeometry2D) param.getValue();
            requestedEnvelope = new GeneralEnvelope((Envelope) gg.getEnvelope2D());
            dim = gg.getGridRange2D().getBounds();
            continue;
          }
          if (name.equals(AbstractGridFormat.OVERVIEW_POLICY.getName())) {
            overviewPolicy = (OverviewPolicy) param.getValue();
            continue;
          }
          if (name.equals(AbstractGridFormat.INPUT_TRANSPARENT_COLOR.getName())) {
            inputTransparentColor = (Color) param.getValue();
            continue;
          }
          if (name.equals(AbstractGridFormat.SUGGESTED_TILE_SIZE.getName())) {
            String suggestedTileSize_ = (String) param.getValue();
            if (suggestedTileSize_ != null && suggestedTileSize_.length() > 0) {
              suggestedTileSize_ = suggestedTileSize_.trim();
              int commaPosition = suggestedTileSize_.indexOf(",");
              if (commaPosition < 0) {
                int tileDim = Integer.parseInt(suggestedTileSize_);
                suggestedTileSize = new int[] {tileDim, tileDim};
              } else {
                int tileW = Integer.parseInt(suggestedTileSize_.substring(0, commaPosition));
                int tileH = Integer.parseInt(suggestedTileSize_.substring(commaPosition + 1));
                suggestedTileSize = new int[] {tileW, tileH};
              }
            }
            continue;
          }
        }
      }
    }

    //
    // set params
    //
    Integer imageChoice = Integer.valueOf(0);
    final ImageReadParam readP = new ImageReadParam();
    try {
      imageChoice = setReadParams(overviewPolicy, readP, requestedEnvelope, dim);
    } catch (TransformException e) {
      throw new DataSourceException(e);
    }

    //
    // IMAGE READ OPERATION
    //
    Hints newHints = null;
    if (suggestedTileSize != null) {
      newHints = hints.clone();
      final ImageLayout layout = new ImageLayout();
      layout.setTileGridXOffset(0);
      layout.setTileGridYOffset(0);
      layout.setTileHeight(suggestedTileSize[1]);
      layout.setTileWidth(suggestedTileSize[0]);
      newHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout));
    }
    final ParameterBlock pbjRead = new ParameterBlock();
    if (extOvrImgChoice >= 0 && imageChoice >= extOvrImgChoice) {
      pbjRead.add(
          ovrInStreamSPI.createInputStreamInstance(
              ovrSource, ImageIO.getUseCache(), ImageIO.getCacheDirectory()));
      pbjRead.add(imageChoice - extOvrImgChoice);
    } else {
      pbjRead.add(
          inStreamSPI != null
              ? inStreamSPI.createInputStreamInstance(
                  source, ImageIO.getUseCache(), ImageIO.getCacheDirectory())
              : ImageIO.createImageInputStream(source));
      pbjRead.add(imageChoice);
    }
    pbjRead.add(Boolean.FALSE);
    pbjRead.add(Boolean.FALSE);
    pbjRead.add(Boolean.FALSE);
    pbjRead.add(null);
    pbjRead.add(null);
    pbjRead.add(readP);
    pbjRead.add(READER_SPI.createReaderInstance());
    RenderedOp coverageRaster =
        JAI.create("ImageRead", pbjRead, newHints != null ? (RenderingHints) newHints : null);

    //
    // MASKING INPUT COLOR as indicated
    //
    if (inputTransparentColor != null) {
      coverageRaster =
          new ImageWorker(coverageRaster)
              .setRenderingHints(newHints)
              .makeColorTransparent(inputTransparentColor)
              .getRenderedOperation();
    }

    AffineTransform rasterToModel = getRescaledRasterToModel(coverageRaster);
    try {
      return createCoverage(coverageRaster, ProjectiveTransform.create(rasterToModel));
    } catch (Exception e) {
      // dispose and close file
      ImageUtilities.disposePlanarImageChain(coverageRaster);

      // rethrow
      if (e instanceof DataSourceException) {
        throw (DataSourceException) e;
      }
      throw new DataSourceException(e);
    }
  }
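A hedged caller-side sketch of the read parameters the loop above recognises; the reader variable, envelope and sizes are hypothetical.

  // Request a 512x512 grid over a previously computed envelope, with 256x256 tiles.
  ParameterValue<GridGeometry2D> gg = AbstractGridFormat.READ_GRIDGEOMETRY2D.createValue();
  gg.setValue(new GridGeometry2D(new GridEnvelope2D(0, 0, 512, 512), requestedEnvelope));
  ParameterValue<String> tiles = AbstractGridFormat.SUGGESTED_TILE_SIZE.createValue();
  tiles.setValue("256,256");
  GridCoverage2D coverage = reader.read(new GeneralParameterValue[] {gg, tiles});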
  /**
   * Returns the specified property.
   *
   * @param name Property name.
   * @param opNode Operation node.
   */
  public Object getProperty(String name, Object opNode) {
    validate(name, opNode);

    if (opNode instanceof RenderedOp && name.equalsIgnoreCase("roi")) {
      RenderedOp op = (RenderedOp) opNode;

      ParameterBlock pb = op.getParameterBlock();

      // Retrieve the rendered source image and its ROI.
      RenderedImage src = pb.getRenderedSource(0);
      Object property = src.getProperty("ROI");
      if (property == null
          || property.equals(java.awt.Image.UndefinedProperty)
          || !(property instanceof ROI)) {
        return java.awt.Image.UndefinedProperty;
      }

      // Return undefined also if source ROI is empty.
      ROI srcROI = (ROI) property;
      if (srcROI.getBounds().isEmpty()) {
        return java.awt.Image.UndefinedProperty;
      }

      // Retrieve the Interpolation object.
      Interpolation interp = (Interpolation) pb.getObjectParameter(1);

      // Determine the effective source bounds.
      Rectangle srcBounds = null;
      PlanarImage dst = op.getRendering();
      if (dst instanceof GeometricOpImage && ((GeometricOpImage) dst).getBorderExtender() == null) {
        srcBounds =
            new Rectangle(
                src.getMinX() + interp.getLeftPadding(),
                src.getMinY() + interp.getTopPadding(),
                src.getWidth() - interp.getWidth() + 1,
                src.getHeight() - interp.getHeight() + 1);
      } else {
        srcBounds = new Rectangle(src.getMinX(), src.getMinY(), src.getWidth(), src.getHeight());
      }

      // If necessary, clip the ROI to the effective source bounds.
      if (!srcBounds.contains(srcROI.getBounds())) {
        srcROI = srcROI.intersect(new ROIShape(srcBounds));
      }

      // Retrieve the Warp object.
      Warp warp = (Warp) pb.getObjectParameter(0);

      // Setting constant image to be warped as a ROI
      Rectangle dstBounds = op.getBounds();

      // Setting layout of the constant image
      ImageLayout2 layout = new ImageLayout2();
      int minx = (int) srcBounds.getMinX();
      int miny = (int) srcBounds.getMinY();
      int w = (int) srcBounds.getWidth();
      int h = (int) srcBounds.getHeight();
      layout.setMinX(minx);
      layout.setMinY(miny);
      layout.setWidth(w);
      layout.setHeight(h);
      RenderingHints hints = op.getRenderingHints();
      hints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout));

      final PlanarImage constantImage =
          ConstantDescriptor.create(new Float(w), new Float(h), new Byte[] {(byte) 255}, hints);

      PlanarImage roiImage = null;

      // Make sure to specify tileCache, tileScheduler, tileRecyclier, by cloning hints.
      RenderingHints warpingHints = op.getRenderingHints();
      warpingHints.remove(JAI.KEY_IMAGE_LAYOUT);

      // Creating warped roi by the same way (Warp, Interpolation, source ROI) we warped the
      // input image.
      final ParameterBlock paramBlk = new ParameterBlock();
      paramBlk.addSource(constantImage);
      paramBlk.add(warp);
      paramBlk.add(interp);
      paramBlk.add(null);
      paramBlk.add(srcROI);

      // force in the image layout, this way we get exactly the same
      // as the affine we're eliminating
      Hints localHints = new Hints(op.getRenderingHints());
      localHints.remove(JAI.KEY_IMAGE_LAYOUT);
      ImageLayout il = new ImageLayout();
      il.setMinX(dstBounds.x);
      il.setMinY(dstBounds.y);
      il.setWidth(dstBounds.width);
      il.setHeight(dstBounds.height);
      localHints.put(JAI.KEY_IMAGE_LAYOUT, il);
      roiImage = JAI.create("Warp", paramBlk, localHints);
      ROI dstROI = new ROI(roiImage, 1);

      // If necessary, clip the warped ROI to the destination bounds.
      if (!dstBounds.contains(dstROI.getBounds())) {
        dstROI = dstROI.intersect(new ROIShape(dstBounds));
      }

      // Return the warped and possibly clipped ROI.
      return dstROI;
    }

    return java.awt.Image.UndefinedProperty;
  }
  @RequestMapping(
      value = "/projects/{id}/cleanse",
      method = RequestMethod.POST,
      produces = "application/xml")
  @PreAuthorize("hasPermission(#project, 'write')")
  public void processCleanse(
      @ModelAttribute(value = "project") Project project,
      @RequestParam(value = "operation", required = true) String operation,
      @RequestParam(value = "fromDate", required = false) String fromDateString,
      @RequestParam(value = "toDate", required = false) String toDateString,
      @RequestParam(value = "animal") List<Long> animalIds,
      @RequestParam(value = "maxSpeed", required = false) Double maxSpeed,
      @RequestParam(value = "minArgosClass", required = false) String minArgosClassCode,
      @RequestParam(value = "maxDop", required = false) Double maxDop,
      HttpServletRequest request,
      HttpServletResponse response)
      throws IOException, RserveInterfaceException {
    Date fromDate = null;
    Date toDate = null;
    try {
      if (StringUtils.isNotBlank(fromDateString)) {
        fromDate = isoDateFormat.parse(fromDateString);
      }
      if (StringUtils.isNotBlank(toDateString)) {
        toDate = isoDateFormat.parse(toDateString);
      }
    } catch (java.text.ParseException e1) {
      PrintWriter out = response.getWriter();
      out.append("<?xml version=\"1.0\"?>\n");
      out.append("<cleanse-response xmlns=\"http://oztrack.org/xmlns#\">\n");
      out.append("    <error>Invalid date parameters</error>\n");
      out.append("</cleanse-response>\n");
      response.setStatus(200);
      return;
    }

    MultiPolygon multiPolygon = null;
    String[] polygonsWkt = request.getParameterValues("polygon");
    if ((polygonsWkt != null) && (polygonsWkt.length > 0)) {
      Hints hints = new Hints();
      hints.put(Hints.CRS, DefaultGeographicCRS.WGS84);
      GeometryFactory geometryFactory = JTSFactoryFinder.getGeometryFactory(hints);
      WKTReader reader = new WKTReader(geometryFactory);
      ArrayList<Polygon> polygons = new ArrayList<Polygon>();
      for (String polygonWkt : polygonsWkt) {
        try {
          Polygon polygon = (Polygon) reader.read(polygonWkt);
          polygons.add(polygon);
        } catch (ParseException e) {
          throw new RuntimeException("Error reading polygon: " + polygonWkt, e);
        }
      }
      multiPolygon = geometryFactory.createMultiPolygon(polygons.toArray(new Polygon[0]));
    }

    Set<PositionFix> speedFilterPositionFixes = null;
    if (maxSpeed != null) {
      speedFilterPositionFixes = new HashSet<PositionFix>();
      SearchQuery searchQuery = new SearchQuery();
      searchQuery.setProject(project);
      searchQuery.setFromDate(fromDate);
      searchQuery.setToDate(toDate);
      searchQuery.setAnimalIds(animalIds);
      List<PositionFix> positionFixList = positionFixDao.getProjectPositionFixList(searchQuery);
      RserveInterface rserveInterface = new RserveInterface(rserveConnectionPool);
      Map<Long, Set<Date>> animalDates =
          rserveInterface.runSpeedFilter(project, positionFixList, maxSpeed);
      for (PositionFix positionFix : positionFixList) {
        Set<Date> dates = animalDates.get(positionFix.getAnimal().getId());
        // Need to create new java.util.Date here because positionFix.detectionTime is a
        // java.sql.Timestamp.
        // Date and Timestamp have same hashCode but their equals methods differ, breaking contains
        // call.
        if ((dates != null) && dates.contains(new Date(positionFix.getDetectionTime().getTime()))) {
          speedFilterPositionFixes.add(positionFix);
        }
      }
    }

    ArgosClass minArgosClass = ArgosClass.fromCode(minArgosClassCode);

    if (operation.equals("delete")) {
      int numDeleted =
          positionFixDao.setDeleted(
              project,
              fromDate,
              toDate,
              animalIds,
              multiPolygon,
              speedFilterPositionFixes,
              minArgosClass,
              maxDop,
              true);
      positionFixDao.renumberPositionFixes(project);
      PrintWriter out = response.getWriter();
      out.append("<?xml version=\"1.0\"?>\n");
      out.append("<cleanse-response xmlns=\"http://oztrack.org/xmlns#\">\n");
      out.append("    <num-deleted>" + numDeleted + "</num-deleted>\n");
      out.append("</cleanse-response>\n");
      response.setStatus(200);
      return;
    } else if (operation.equals("undelete")) {
      int numUndeleted =
          positionFixDao.setDeleted(
              project,
              fromDate,
              toDate,
              animalIds,
              multiPolygon,
              speedFilterPositionFixes,
              minArgosClass,
              maxDop,
              false);
      positionFixDao.renumberPositionFixes(project);
      PrintWriter out = response.getWriter();
      out.append("<?xml version=\"1.0\"?>\n");
      out.append("<cleanse-response xmlns=\"http://oztrack.org/xmlns#\">\n");
      out.append("    <num-undeleted>" + numUndeleted + "</num-undeleted>\n");
      out.append("</cleanse-response>\n");
      response.setStatus(200);
      return;
    } else {
      PrintWriter out = response.getWriter();
      out.append("<?xml version=\"1.0\"?>\n");
      out.append("<cleanse-response xmlns=\"http://oztrack.org/xmlns#\">\n");
      out.append("    <error>" + "Unknown operation: " + operation + "</error>\n");
      out.append("</cleanse-response>\n");
      response.setStatus(400);
      return;
    }
  }
  public void contextInitialized(ServletContextEvent sce) {
    // start up tctool - remove it before committing!!!!
    // new tilecachetool.TCTool().setVisible(true);

    // Register logging, and bridge to JAI logging
    GeoTools.init((Hints) null);

    // Custom GeoTools ImagingListener used to ignore common warnings
    JAI.getDefaultInstance()
        .setImagingListener(
            new ImagingListener() {
              final Logger LOGGER = Logging.getLogger("javax.media.jai");

              @Override
              public boolean errorOccurred(
                  String message, Throwable thrown, Object where, boolean isRetryable)
                  throws RuntimeException {
                if (isSerializableRenderedImageFinalization(where, thrown)) {
                  LOGGER.log(Level.FINEST, message, thrown);
                } else if (message.contains("Continuing in pure Java mode")) {
                  LOGGER.log(Level.FINE, message, thrown);
                } else {
                  LOGGER.log(Level.INFO, message, thrown);
                }
                return false; // we are not trying to recover
              }

              private boolean isSerializableRenderedImageFinalization(Object where, Throwable t) {
                if (!(where instanceof SerializableRenderedImage)) {
                  return false;
                }

                // check if it's the finalizer
                StackTraceElement[] elements = t.getStackTrace();
                for (StackTraceElement element : elements) {
                  if (element.getMethodName().equals("finalize")
                      && element.getClassName().endsWith("SerializableRenderedImage")) return true;
                }

                return false;
              }
            });

    // setup concurrent operation registry
    JAI jaiDef = JAI.getDefaultInstance();
    if (!(jaiDef.getOperationRegistry() instanceof ConcurrentOperationRegistry
        || jaiDef.getOperationRegistry()
            instanceof it.geosolutions.jaiext.ConcurrentOperationRegistry)) {
      jaiDef.setOperationRegistry(ConcurrentOperationRegistry.initializeRegistry());
    }

    // setup the concurrent tile cache (has proper memory limit handling also for small tiles)
    if (!(jaiDef.getTileCache() instanceof ConcurrentTileCacheMultiMap)) {
      jaiDef.setTileCache(new ConcurrentTileCacheMultiMap());
    }

    // make sure we remember if GeoServer controls logging or not
    String strValue =
        GeoServerExtensions.getProperty(
            LoggingUtils.RELINQUISH_LOG4J_CONTROL, sce.getServletContext());
    relinquishLoggingControl = Boolean.valueOf(strValue);

    // if the server admin did not set it up otherwise, force X/Y axis
    // ordering
    // This one is a good place because we need to initialize this property
    // before any other operation can trigger the initialization of the CRS
    // subsystem
    if (System.getProperty("org.geotools.referencing.forceXY") == null) {
      System.setProperty("org.geotools.referencing.forceXY", "true");
    }
    if (Boolean.TRUE.equals(Hints.getSystemDefault(Hints.FORCE_LONGITUDE_FIRST_AXIS_ORDER))) {
      Hints.putSystemDefault(Hints.FORCE_AXIS_ORDER_HONORING, "http");
    }
    Hints.putSystemDefault(Hints.LENIENT_DATUM_SHIFT, true);

    // setup the referencing tolerance to make it more tolerant to tiny differences
    // between projections (increases the chance of matching a random prj file content
    // to an actual EPSG code)
    String comparisonToleranceProperty =
        GeoServerExtensions.getProperty(COMPARISON_TOLERANCE_PROPERTY);
    double comparisonTolerance = DEFAULT_COMPARISON_TOLERANCE;
    if (comparisonToleranceProperty != null) {
      try {
        comparisonTolerance = Double.parseDouble(comparisonToleranceProperty);
      } catch (NumberFormatException nfe) {
        if (LOGGER.isLoggable(Level.WARNING)) {
          LOGGER.warning(
              "Unable to parse the specified COMPARISON_TOLERANCE "
                  + "system property: "
                  + comparisonToleranceProperty
                  + " which should be a number. Using Default: "
                  + DEFAULT_COMPARISON_TOLERANCE);
        }
      }
    }
    Hints.putSystemDefault(Hints.COMPARISON_TOLERANCE, comparisonTolerance);

    final Hints defHints = GeoTools.getDefaultHints();

    // Initialize GridCoverageFactory so that we don't make a lookup every time a factory is needed
    Hints.putSystemDefault(
        Hints.GRID_COVERAGE_FACTORY, CoverageFactoryFinder.getGridCoverageFactory(defHints));

    // don't allow the connection to the EPSG database to time out. This is a server app,
    // we can afford keeping the EPSG db always on
    System.setProperty("org.geotools.epsg.factory.timeout", "-1");

    // HACK: java.util.prefs are awful. See
    // http://www.allaboutbalance.com/disableprefs. When the site comes
    // back up we should implement their better way of fixing the problem.
    System.setProperty("java.util.prefs.syncInterval", "5000000");

    // Fix issue with tomcat and JreMemoryLeakPreventionListener causing issues with
    // IIORegistry leading to imageio plugins not being properly initialized
    ImageIO.scanForPlugins();

    // in any case, the native png reader is worse than the pure java ones, so
    // let's disable it (the native png writer, on the other hand, is faster)...
    ImageIOExt.allowNativeCodec("png", ImageReaderSpi.class, false);
    ImageIOExt.allowNativeCodec("png", ImageWriterSpi.class, true);

    // initialize GeoTools factories so that we don't make a SPI lookup every time a factory is
    // needed
    Hints.putSystemDefault(Hints.FILTER_FACTORY, CommonFactoryFinder.getFilterFactory2(null));
    Hints.putSystemDefault(Hints.STYLE_FACTORY, CommonFactoryFinder.getStyleFactory(null));
    Hints.putSystemDefault(Hints.FEATURE_FACTORY, CommonFactoryFinder.getFeatureFactory(null));

    // initialize the default executor service
    final ThreadPoolExecutor executor =
        new ThreadPoolExecutor(
            CoverageAccessInfoImpl.DEFAULT_CorePoolSize,
            CoverageAccessInfoImpl.DEFAULT_MaxPoolSize,
            CoverageAccessInfoImpl.DEFAULT_KeepAliveTime,
            TimeUnit.MILLISECONDS,
            new LinkedBlockingQueue<Runnable>());
    Hints.putSystemDefault(Hints.EXECUTOR_SERVICE, executor);
  }
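The system-default hints installed above are plain process-wide state; a hedged sketch of reading one back and clearing it (the clean-up mirrors what contextDestroyed below does for other resources):

  // Inspect and clear a system-default hint installed at startup.
  Object tolerance = Hints.getSystemDefault(Hints.COMPARISON_TOLERANCE);
  Hints.removeSystemDefault(Hints.COMPARISON_TOLERANCE);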
  /**
   * This method tries hard to stop all threads and remove all references to classes in GeoServer so
   * that we can avoid permgen leaks on application undeploy. What happens is that, if any JDK class
   * holds a reference to one of the classes loaded by the webapp classloader, then the classloader cannot
   * be collected and neither can all the classes loaded by it (since each class keeps a back
   * reference to the classloader that loaded it). The same happens for any residual thread launched
   * by the web app.
   */
  public void contextDestroyed(ServletContextEvent sce) {
    try {
      LOGGER.info("Beginning GeoServer cleanup sequence");

      // the dreaded classloader
      ClassLoader webappClassLoader = getClass().getClassLoader();

      // unload all of the jdbc drivers we have loaded. We need to store them and unregister
      // later to avoid concurrent modification exceptions
      Enumeration<Driver> drivers = DriverManager.getDrivers();
      Set<Driver> driversToUnload = new HashSet<Driver>();
      while (drivers.hasMoreElements()) {
        Driver driver = drivers.nextElement();
        try {
          // the driver class loader can be null if the driver comes from the JDK, such as the
          // sun.jdbc.odbc.JdbcOdbcDriver
          ClassLoader driverClassLoader = driver.getClass().getClassLoader();
          if (driverClassLoader != null && webappClassLoader.equals(driverClassLoader)) {
            driversToUnload.add(driver);
          }
        } catch (Throwable t) {
          t.printStackTrace();
        }
      }
      for (Driver driver : driversToUnload) {
        try {
          DriverManager.deregisterDriver(driver);
          LOGGER.info("Unregistered JDBC driver " + driver);
        } catch (Exception e) {
          LOGGER.log(Level.SEVERE, "Could now unload driver " + driver.getClass(), e);
        }
      }
      drivers = DriverManager.getDrivers();
      while (drivers.hasMoreElements()) {
        Driver driver = drivers.nextElement();
      }
      try {
        Class h2Driver = Class.forName("org.h2.Driver");
        Method m = h2Driver.getMethod("unload");
        m.invoke(null);
      } catch (Exception e) {
        LOGGER.log(Level.WARNING, "Failed to unload the H2 driver", e);
      }

      // unload all deferred authority factories so that we get rid of the timer tasks in them
      try {
        disposeAuthorityFactories(
            ReferencingFactoryFinder.getCoordinateOperationAuthorityFactories(null));
      } catch (Throwable e) {
        LOGGER.log(Level.WARNING, "Error occurred trying to dispose authority factories", e);
      }
      try {
        disposeAuthorityFactories(ReferencingFactoryFinder.getCRSAuthorityFactories(null));
      } catch (Throwable e) {
        LOGGER.log(Level.WARNING, "Error occurred trying to dispose authority factories", e);
      }
      try {
        disposeAuthorityFactories(ReferencingFactoryFinder.getCSAuthorityFactories(null));
      } catch (Throwable e) {
        LOGGER.log(Level.WARNING, "Error occurred trying to dispose authority factories", e);
      }

      // kill the threads created by referencing
      WeakCollectionCleaner.DEFAULT.exit();
      DeferredAuthorityFactory.exit();
      CRS.reset("all");
      LOGGER.info("Shut down GT referencing threads ");
      // reset
      ReferencingFactoryFinder.reset();
      CommonFactoryFinder.reset();
      DataStoreFinder.reset();
      DataAccessFinder.reset();
      LOGGER.info("Shut down GT  SPI ");

      LOGGER.info("Shut down coverage thread pool ");
      Object o = Hints.getSystemDefault(Hints.EXECUTOR_SERVICE);
      if (o != null && o instanceof ExecutorService) {
        final ThreadPoolExecutor executor = (ThreadPoolExecutor) o;
        try {
          executor.shutdown();
        } finally {
          try {
            executor.shutdownNow();
          } finally {

          }
        }
      }

      // unload everything that JAI ImageIO can still refer to
      // We need to store them and unregister later to avoid concurrent modification exceptions
      final IIORegistry ioRegistry = IIORegistry.getDefaultInstance();
      Set<IIOServiceProvider> providersToUnload = new HashSet<>();
      for (Iterator<Class<?>> cats = ioRegistry.getCategories(); cats.hasNext(); ) {
        Class<?> category = cats.next();
        for (Iterator it = ioRegistry.getServiceProviders(category, false); it.hasNext(); ) {
          final IIOServiceProvider provider = (IIOServiceProvider) it.next();
          if (webappClassLoader.equals(provider.getClass().getClassLoader())) {
            providersToUnload.add(provider);
          }
        }
      }
      for (IIOServiceProvider provider : providersToUnload) {
        ioRegistry.deregisterServiceProvider(provider);
        LOGGER.info("Unregistering Image I/O provider " + provider);
      }

      // unload everything that JAI can still refer to
      final OperationRegistry opRegistry = JAI.getDefaultInstance().getOperationRegistry();
      for (String mode : RegistryMode.getModeNames()) {
        for (Iterator descriptors = opRegistry.getDescriptors(mode).iterator();
            descriptors != null && descriptors.hasNext(); ) {
          RegistryElementDescriptor red = (RegistryElementDescriptor) descriptors.next();
          int factoryCount = 0;
          int unregisteredCount = 0;
          // look for all the factories for that operation
          for (Iterator factories = opRegistry.getFactoryIterator(mode, red.getName());
              factories != null && factories.hasNext(); ) {
            Object factory = factories.next();
            if (factory == null) {
              continue;
            }
            factoryCount++;
            if (webappClassLoader.equals(factory.getClass().getClassLoader())) {
              boolean unregistered = false;
              // we need to scan against all "products" to unregister the factory
              Vector orderedProductList = opRegistry.getOrderedProductList(mode, red.getName());
              if (orderedProductList != null) {
                for (Iterator products = orderedProductList.iterator();
                    products != null && products.hasNext(); ) {
                  String product = (String) products.next();
                  try {
                    opRegistry.unregisterFactory(mode, red.getName(), product, factory);
                    unregistered = true;
                    LOGGER.info("Unregistering JAI factory " + factory.getClass());
                  } catch (Throwable t) {
                    // may fail due to the factory not being registered against that product
                  }
                }
              }
              if (unregistered) {
                unregisteredCount++;
              }
            }
          }

          // if all the factories were unregistered, get rid of the descriptor as well
          if (factoryCount > 0 && unregisteredCount == factoryCount) {
            opRegistry.unregisterDescriptor(red);
          }
        }
      }

      // flush all javabean introspection caches as this too can keep a webapp classloader from
      // being unloaded
      Introspector.flushCaches();
      LOGGER.info("Cleaned up javabean caches");

      // unload the logging framework
      if (!relinquishLoggingControl) LogManager.shutdown();
      LogFactory.release(Thread.currentThread().getContextClassLoader());

      // GeoTools/GeoServer have a lot of finalizers and until they are run the JVM
      // itself will keep the class loader alive...
      try {
        System.gc();
        System.runFinalization();
        System.gc();
        System.runFinalization();
        System.gc();
        System.runFinalization();
      } catch (Throwable t) {
        LOGGER.severe("Failed to perform closing up finalization");
        t.printStackTrace();
      }
    } catch (Throwable t) {
      // if anything goes south during the cleanup procedures I want to know what it is
      t.printStackTrace();
    }
  }
  protected Attribute setAttributeContent(
      Attribute target,
      StepList xpath,
      Object value,
      String id,
      AttributeType targetNodeType,
      boolean isXlinkRef,
      Expression sourceExpression,
      Object source,
      final Map<Name, Expression> clientProperties,
      boolean ignoreXlinkHref) {
    Attribute instance = null;

    Map<Name, Expression> properties = new HashMap<Name, Expression>(clientProperties);

    if (ignoreXlinkHref) {
      properties.remove(XLINK_HREF_NAME);
    }

    if (properties.containsKey(XLINK_HREF_NAME) && resolveDepth > 0) {
      // local resolve

      String refid =
          referenceToIdentifier(getValue(properties.get(XLINK_HREF_NAME), source).toString());

      if (refid != null) {

        final Hints hints = new Hints();
        if (resolveDepth > 1) {
          hints.put(Hints.RESOLVE, ResolveValueType.ALL);
          // only the top-level resolve thread should monitor timeout
          hints.put(Hints.RESOLVE_TIMEOUT, Integer.MAX_VALUE);
          hints.put(Hints.ASSOCIATION_TRAVERSAL_DEPTH, resolveDepth - 1);
        } else {
          hints.put(Hints.RESOLVE, ResolveValueType.NONE);
        }

        // let's try finding it
        FeatureFinder finder = new FeatureFinder(refid, hints);
        // this will be null if joining or sleeping is interrupted
        Feature foundFeature = null;
        if (resolveTimeOut == Integer.MAX_VALUE) {
          // not the top-level resolve thread so do not monitor timeout
          finder.run();
          foundFeature = finder.getFeature();
        } else {
          Thread thread = new Thread(finder);
          long startTime = System.currentTimeMillis();
          thread.start();
          try {
            while (thread.isAlive()
                && (System.currentTimeMillis() - startTime) / 1000 < resolveTimeOut) {
              Thread.sleep(RESOLVE_TIMEOUT_POLL_INTERVAL);
            }
            thread.interrupt();
            // joining ensures synchronisation
            thread.join();
            foundFeature = finder.getFeature();
          } catch (InterruptedException e) {
            // clean up as best we can
            thread.interrupt();
            throw new RuntimeException("Interrupted while resolving resource " + refid);
          }
        }

        if (foundFeature != null) {
          // found it
          instance =
              xpathAttributeBuilder.set(
                  target,
                  xpath,
                  Collections.singletonList(foundFeature),
                  id,
                  targetNodeType,
                  false,
                  sourceExpression);
          properties.remove(XLINK_HREF_NAME);
        }
      }
    }

    if (instance == null) {
      instance =
          xpathAttributeBuilder.set(
              target, xpath, value, id, targetNodeType, false, sourceExpression);
    }

    setClientProperties(instance, source, properties);

    return instance;
  }
Example #23
 /** Hints constructor for FactoryRegistry */
 public JTSComplexFactory(Hints hints) {
   this((CoordinateReferenceSystem) hints.get(Hints.CRS));
 }
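A hedged construction sketch for the factory above, using the stock WGS84 constant for the CRS hint.

  // The FactoryRegistry-style constructor simply pulls the CRS out of the hints.
  Hints hints = new Hints(Hints.CRS, DefaultGeographicCRS.WGS84);
  JTSComplexFactory factory = new JTSComplexFactory(hints);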
Example #24
  /**
   * Loads a raster covering a portion of the granule described by this {@link
   * GranuleDescriptor}.
   *
   * @param imageReadParameters the {@link ImageReadParam} to use for reading.
   * @param index the index to use for the {@link ImageReader}.
   * @param cropBBox the bbox to use for cropping.
   * @param mosaicWorldToGrid the cropping grid to world transform.
   * @param request the incoming request to satisfy.
   * @param hints {@link Hints} to be used for creating this raster.
   * @return a raster covering the requested portion of the granule described by this {@link
   *     GranuleDescriptor}.
   * @throws IOException in case an error occurs.
   */
  public GranuleLoadingResult loadRaster(
      final ImageReadParam imageReadParameters,
      final int index,
      final ReferencedEnvelope cropBBox,
      final MathTransform2D mosaicWorldToGrid,
      final RasterLayerRequest request,
      final Hints hints)
      throws IOException {

    if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
      final String name = Thread.currentThread().getName();
      LOGGER.finer(
          "Thread:" + name + " Loading raster data for granuleDescriptor " + this.toString());
    }
    ImageReadParam readParameters = null;
    int imageIndex;
    final ReferencedEnvelope bbox =
        inclusionGeometry != null
            ? new ReferencedEnvelope(
                granuleBBOX.intersection(inclusionGeometry.getEnvelopeInternal()),
                granuleBBOX.getCoordinateReferenceSystem())
            : granuleBBOX;
    boolean doFiltering = false;
    if (filterMe) {
      doFiltering = Utils.areaIsDifferent(inclusionGeometry, baseGridToWorld, granuleBBOX);
    }

    // intersection of this tile bound with the current crop bbox
    final ReferencedEnvelope intersection =
        new ReferencedEnvelope(
            bbox.intersection(cropBBox), cropBBox.getCoordinateReferenceSystem());
    if (intersection.isEmpty()) {
      if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
        LOGGER.fine(
            new StringBuilder("Got empty intersection for granule ")
                .append(this.toString())
                .append(" with request ")
                .append(request.toString())
                .append(" Resulting in no granule loaded: Empty result")
                .toString());
      }
      return null;
    }

    ImageInputStream inStream = null;
    ImageReader reader = null;
    try {
      //
      // get info about the raster we have to read
      //

      // get a stream
      assert cachedStreamSPI != null : "no cachedStreamSPI available!";
      inStream =
          cachedStreamSPI.createInputStreamInstance(
              granuleUrl, ImageIO.getUseCache(), ImageIO.getCacheDirectory());
      if (inStream == null) return null;

      // get a reader and try to cache the relevant SPI
      if (cachedReaderSPI == null) {
        reader = ImageIOExt.getImageioReader(inStream);
        if (reader != null) cachedReaderSPI = reader.getOriginatingProvider();
      } else reader = cachedReaderSPI.createReaderInstance();
      if (reader == null) {
        if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
          LOGGER.warning(
              new StringBuilder("Unable to get s reader for granuleDescriptor ")
                  .append(this.toString())
                  .append(" with request ")
                  .append(request.toString())
                  .append(" Resulting in no granule loaded: Empty result")
                  .toString());
        }
        return null;
      }
      // set input
      reader.setInput(inStream);

      // Checking for heterogeneous granules
      if (request.isHeterogeneousGranules()) {
        // create read parameters
        readParameters = new ImageReadParam();

        // override the overviews controller for the base layer
        imageIndex =
            ReadParamsController.setReadParams(
                request.getRequestedResolution(),
                request.getOverviewPolicy(),
                request.getDecimationPolicy(),
                readParameters,
                request.rasterManager,
                overviewsController);
      } else {
        imageIndex = index;
        readParameters = imageReadParameters;
      }

      // get selected level and base level dimensions
      final GranuleOverviewLevelDescriptor selectedlevel = getLevel(imageIndex, reader);

      // now create the crop world to grid transform (the inverse of the selected
      // level grid to world), which is used to decide which source area we need to
      // crop in the selected level, taking into account the scale factors imposed
      // by the selection of this level together with the base level grid to world
      // transformation
      AffineTransform2D cropWorldToGrid =
          new AffineTransform2D(selectedlevel.gridToWorldTransformCorner);
      cropWorldToGrid = (AffineTransform2D) cropWorldToGrid.inverse();
      // compute the crop source area, which lives in the selected level raster
      // space; NOTICE that at the end we also need to take into account possible
      // decimation, therefore we cannot just use the crop world to grid transform
      // but need to correct for it.
      final Rectangle sourceArea =
          CRS.transform(cropWorldToGrid, intersection).toRectangle2D().getBounds();
      // gutter
      if (selectedlevel.baseToLevelTransform.isIdentity()) sourceArea.grow(2, 2);
      XRectangle2D.intersect(
          sourceArea,
          selectedlevel.rasterDimensions,
          sourceArea); // make sure roundings don't bother us
      // is it empty??
      if (sourceArea.isEmpty()) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
          LOGGER.fine(
              "Got empty area for granuleDescriptor "
                  + this.toString()
                  + " with request "
                  + request.toString()
                  + " Resulting in no granule loaded: Empty result");
        }
        return null;

      } else if (LOGGER.isLoggable(java.util.logging.Level.FINER)) {
        LOGGER.finer(
            "Loading level "
                + imageIndex
                + " with source region: "
                + sourceArea
                + " subsampling: "
                + readParameters.getSourceXSubsampling()
                + ","
                + readParameters.getSourceYSubsampling()
                + " for granule:"
                + granuleUrl);
      }

      // Setting subsampling
      int newSubSamplingFactor = 0;
      final String pluginName = cachedReaderSPI.getPluginClassName();
      if (pluginName != null && pluginName.equals(ImageUtilities.DIRECT_KAKADU_PLUGIN)) {
        final int ssx = readParameters.getSourceXSubsampling();
        final int ssy = readParameters.getSourceYSubsampling();
        newSubSamplingFactor = ImageIOUtilities.getSubSamplingFactor2(ssx, ssy);
        if (newSubSamplingFactor != 0) {
          if (newSubSamplingFactor > maxDecimationFactor && maxDecimationFactor != -1) {
            newSubSamplingFactor = maxDecimationFactor;
          }
          readParameters.setSourceSubsampling(newSubSamplingFactor, newSubSamplingFactor, 0, 0);
        }
      }

      // set the source region
      readParameters.setSourceRegion(sourceArea);
      final RenderedImage raster;
      try {
        // read
        raster =
            request
                .getReadType()
                .read(
                    readParameters,
                    imageIndex,
                    granuleUrl,
                    selectedlevel.rasterDimensions,
                    reader,
                    hints,
                    false);

      } catch (Throwable e) {
        if (LOGGER.isLoggable(java.util.logging.Level.FINE)) {
          LOGGER.log(
              java.util.logging.Level.FINE,
              "Unable to load raster for granuleDescriptor "
                  + this.toString()
                  + " with request "
                  + request.toString()
                  + " Resulting in no granule loaded: Empty result",
              e);
        }
        return null;
      }

      // use fixed source area
      sourceArea.setRect(readParameters.getSourceRegion());

      //
      // setting new coefficients to define a new affineTransformation
      // to be applied to the grid to world transformation
      // -----------------------------------------------------------------------------------
      //
      // With respect to the original envelope, the obtained planarImage
      // needs to be rescaled. The scaling factors are computed as the
      // ratio between the cropped source region sizes and the read
      // image sizes.
      //
      // place it in the mosaic using the coords created above;
      double decimationScaleX = ((1.0 * sourceArea.width) / raster.getWidth());
      double decimationScaleY = ((1.0 * sourceArea.height) / raster.getHeight());
      final AffineTransform decimationScaleTranform =
          XAffineTransform.getScaleInstance(decimationScaleX, decimationScaleY);

      // take into account the translation needed to work in the selected level raster space
      final AffineTransform afterDecimationTranslateTranform =
          XAffineTransform.getTranslateInstance(sourceArea.x, sourceArea.y);

      // now we need to go back to the base level raster space
      final AffineTransform backToBaseLevelScaleTransform = selectedlevel.baseToLevelTransform;

      // now create the overall transform
      final AffineTransform finalRaster2Model = new AffineTransform(baseGridToWorld);
      finalRaster2Model.concatenate(CoverageUtilities.CENTER_TO_CORNER);
      final double x = finalRaster2Model.getTranslateX();
      final double y = finalRaster2Model.getTranslateY();

      if (!XAffineTransform.isIdentity(backToBaseLevelScaleTransform, Utils.AFFINE_IDENTITY_EPS))
        finalRaster2Model.concatenate(backToBaseLevelScaleTransform);
      if (!XAffineTransform.isIdentity(afterDecimationTranslateTranform, Utils.AFFINE_IDENTITY_EPS))
        finalRaster2Model.concatenate(afterDecimationTranslateTranform);
      if (!XAffineTransform.isIdentity(decimationScaleTranform, Utils.AFFINE_IDENTITY_EPS))
        finalRaster2Model.concatenate(decimationScaleTranform);

      // take into account translation factors to place this tile
      finalRaster2Model.preConcatenate((AffineTransform) mosaicWorldToGrid);
      final Interpolation interpolation = request.getInterpolation();
      // defensive check to avoid JAI errors when computing its internal layout on images
      // that are too small
      Rectangle2D finalLayout =
          ImageUtilities.layoutHelper(
              raster,
              (float) finalRaster2Model.getScaleX(),
              (float) finalRaster2Model.getScaleY(),
              (float) finalRaster2Model.getTranslateX(),
              (float) finalRaster2Model.getTranslateY(),
              interpolation);
      if (finalLayout.isEmpty()) {
        if (LOGGER.isLoggable(java.util.logging.Level.INFO))
          LOGGER.info(
              "Unable to create a granuleDescriptor "
                  + this.toString()
                  + " due to jai scale bug creating a null source area");
        return null;
      }
      ROI granuleLoadingShape = null;
      if (granuleROIShape != null) {

        final Point2D translate =
            mosaicWorldToGrid.transform(new DirectPosition2D(x, y), (Point2D) null);
        AffineTransform tx2 = new AffineTransform();
        tx2.preConcatenate(
            AffineTransform.getScaleInstance(
                ((AffineTransform) mosaicWorldToGrid).getScaleX(),
                -((AffineTransform) mosaicWorldToGrid).getScaleY()));
        tx2.preConcatenate(
            AffineTransform.getScaleInstance(
                ((AffineTransform) baseGridToWorld).getScaleX(),
                -((AffineTransform) baseGridToWorld).getScaleY()));
        tx2.preConcatenate(
            AffineTransform.getTranslateInstance(translate.getX(), translate.getY()));
        granuleLoadingShape = (ROI) granuleROIShape.transform(tx2);
      }
      // apply the affine transform, preserving the indexed color model
      final RenderingHints localHints =
          new RenderingHints(
              JAI.KEY_REPLACE_INDEX_COLOR_MODEL,
              interpolation instanceof InterpolationNearest ? Boolean.FALSE : Boolean.TRUE);
      if (XAffineTransform.isIdentity(finalRaster2Model, Utils.AFFINE_IDENTITY_EPS)) {
        return new GranuleLoadingResult(raster, granuleLoadingShape, granuleUrl, doFiltering);
      } else {
        //
        // In case we are asked to use certain tile dimensions we also tile at this
        // stage when the read type is Direct, since buffered images come up untiled
        // and this can affect the performance of the subsequent affine operation.
        //
        final Dimension tileDimensions = request.getTileDimensions();
        if (tileDimensions != null && request.getReadType().equals(ReadType.DIRECT_READ)) {
          final ImageLayout layout = new ImageLayout();
          layout.setTileHeight(tileDimensions.height).setTileWidth(tileDimensions.width);
          localHints.add(new RenderingHints(JAI.KEY_IMAGE_LAYOUT, layout));
        } else {
          if (hints != null && hints.containsKey(JAI.KEY_IMAGE_LAYOUT)) {
            final Object layout = hints.get(JAI.KEY_IMAGE_LAYOUT);
            if (layout != null && layout instanceof ImageLayout) {
              localHints.add(
                  new RenderingHints(JAI.KEY_IMAGE_LAYOUT, ((ImageLayout) layout).clone()));
            }
          }
        }
        if (hints != null && hints.containsKey(JAI.KEY_TILE_CACHE)) {
          final Object cache = hints.get(JAI.KEY_TILE_CACHE);
          if (cache != null && cache instanceof TileCache)
            localHints.add(new RenderingHints(JAI.KEY_TILE_CACHE, (TileCache) cache));
        }
        if (hints != null && hints.containsKey(JAI.KEY_TILE_SCHEDULER)) {
          final Object scheduler = hints.get(JAI.KEY_TILE_SCHEDULER);
          if (scheduler != null && scheduler instanceof TileScheduler)
            localHints.add(new RenderingHints(JAI.KEY_TILE_SCHEDULER, (TileScheduler) scheduler));
        }
        boolean addBorderExtender = true;
        if (hints != null && hints.containsKey(JAI.KEY_BORDER_EXTENDER)) {
          final Object extender = hints.get(JAI.KEY_BORDER_EXTENDER);
          if (extender != null && extender instanceof BorderExtender) {
            localHints.add(new RenderingHints(JAI.KEY_BORDER_EXTENDER, (BorderExtender) extender));
            addBorderExtender = false;
          }
        }
        // border extender
        if (addBorderExtender) {
          localHints.add(ImageUtilities.BORDER_EXTENDER_HINTS);
        }
        //                boolean hasScaleX=!(Math.abs(finalRaster2Model.getScaleX()-1) <
        // 1E-2/(raster.getWidth()+1-raster.getMinX()));
        //                boolean hasScaleY=!(Math.abs(finalRaster2Model.getScaleY()-1) <
        // 1E-2/(raster.getHeight()+1-raster.getMinY()));
        //                boolean hasShearX=!(finalRaster2Model.getShearX() == 0.0);
        //                boolean hasShearY=!(finalRaster2Model.getShearY() == 0.0);
        //                boolean hasTranslateX=!(Math.abs(finalRaster2Model.getTranslateX()) <
        // 0.01F);
        //                boolean hasTranslateY=!(Math.abs(finalRaster2Model.getTranslateY()) <
        // 0.01F);
        //                boolean isTranslateXInt=!(Math.abs(finalRaster2Model.getTranslateX() -
        // (int) finalRaster2Model.getTranslateX()) <  0.01F);
        //                boolean isTranslateYInt=!(Math.abs(finalRaster2Model.getTranslateY() -
        // (int) finalRaster2Model.getTranslateY()) <  0.01F);
        //
        //                boolean isIdentity = finalRaster2Model.isIdentity() &&
        // !hasScaleX&&!hasScaleY &&!hasTranslateX&&!hasTranslateY;

        //                // TODO how can we check that a skew is harmless?
        //                if(isIdentity){
        //                    // TODO check if we are missing anything like tiling or such that
        // comes from hints
        //                    return new GranuleLoadingResult(raster, granuleLoadingShape,
        // granuleUrl, doFiltering);
        //                }
        //
        //                // TOLERANCE ON PIXELS SIZE
        //
        //                // Check and see if the affine transform is in fact doing
        //                // a Translate operation. That is a scale by 1 and no rotation.
        //                // In which case call translate. Note that only integer translate
        //                // is applicable. For non-integer translate we'll have to do the
        //                // affine.
        //                // If the hints contain an ImageLayout hint, we can't use
        //                // TranslateIntOpImage since it isn't capable of dealing with that.
        //                // Get ImageLayout from renderHints if any.
        //                ImageLayout layout = RIFUtil.getImageLayoutHint(localHints);
        //                if ( !hasScaleX &&
        //                     !hasScaleY  &&
        //                      !hasShearX&&
        //                      !hasShearY&&
        //                      isTranslateXInt&&
        //                      isTranslateYInt&&
        //                    layout == null) {
        //                    // It's an integer translate
        //                    return new GranuleLoadingResult(new TranslateIntOpImage(raster,
        //                                                    localHints,
        //                                                   (int) finalRaster2Model.getShearX(),
        //                                                   (int)
        // finalRaster2Model.getShearY()),granuleLoadingShape, granuleUrl, doFiltering);
        //                }

        ImageWorker iw = new ImageWorker(raster);
        iw.setRenderingHints(localHints);
        iw.affine(finalRaster2Model, interpolation, request.getBackgroundValues());
        return new GranuleLoadingResult(
            iw.getRenderedImage(), granuleLoadingShape, granuleUrl, doFiltering);
      }

    } catch (IllegalStateException e) {
      if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
        LOGGER.log(
            java.util.logging.Level.WARNING,
            new StringBuilder("Unable to load raster for granuleDescriptor ")
                .append(this.toString())
                .append(" with request ")
                .append(request.toString())
                .append(" Resulting in no granule loaded: Empty result")
                .toString(),
            e);
      }
      return null;
    } catch (org.opengis.referencing.operation.NoninvertibleTransformException e) {
      if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
        LOGGER.log(
            java.util.logging.Level.WARNING,
            new StringBuilder("Unable to load raster for granuleDescriptor ")
                .append(this.toString())
                .append(" with request ")
                .append(request.toString())
                .append(" Resulting in no granule loaded: Empty result")
                .toString(),
            e);
      }
      return null;
    } catch (TransformException e) {
      if (LOGGER.isLoggable(java.util.logging.Level.WARNING)) {
        LOGGER.log(
            java.util.logging.Level.WARNING,
            new StringBuilder("Unable to load raster for granuleDescriptor ")
                .append(this.toString())
                .append(" with request ")
                .append(request.toString())
                .append(" Resulting in no granule loaded: Empty result")
                .toString(),
            e);
      }
      return null;

    } finally {
      try {
        if (request.getReadType() != ReadType.JAI_IMAGEREAD && inStream != null) {
          inStream.close();
        }
      } finally {
        if (request.getReadType() != ReadType.JAI_IMAGEREAD && reader != null) {
          reader.dispose();
        }
      }
    }
  }
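The transform bookkeeping above is the subtle part of loadRaster: the decimation scale, the translation into the selected overview level, and the back-to-base-level transform are concatenated onto the base grid-to-world transform, and the result is pre-concatenated with the mosaic world-to-grid transform. The following is a minimal, self-contained sketch of that composition with hypothetical matrices; the numeric values, and the half-pixel translate standing in for CoverageUtilities.CENTER_TO_CORNER, are illustrative only:

import java.awt.geom.AffineTransform;
import java.awt.geom.Point2D;

public class GranuleTransformSketch {
  public static void main(String[] args) {
    // Hypothetical base level grid-to-world transform (pixel centers to lon/lat).
    AffineTransform baseGridToWorld = new AffineTransform(0.01, 0, 0, -0.01, 10.0, 45.0);
    // Stand-in for CoverageUtilities.CENTER_TO_CORNER (half-pixel shift).
    AffineTransform centerToCorner = AffineTransform.getTranslateInstance(-0.5, -0.5);
    // Hypothetical overview level, 2x coarser than the base level.
    AffineTransform backToBaseLevel = AffineTransform.getScaleInstance(2, 2);
    // Origin of the cropped source area in the selected level raster space.
    AffineTransform afterDecimationTranslate = AffineTransform.getTranslateInstance(128, 64);
    // Ratio between the cropped source region size and the size actually read.
    AffineTransform decimationScale = AffineTransform.getScaleInstance(1.5, 1.5);
    // Hypothetical mosaic world-to-grid transform used to place the tile.
    AffineTransform mosaicWorldToGrid = new AffineTransform(100, 0, 0, -100, -1000, 4500);

    // Same composition order used in loadRaster(): transforms closest to the read
    // raster are concatenated last; the mosaic placement is pre-concatenated.
    AffineTransform finalRaster2Model = new AffineTransform(baseGridToWorld);
    finalRaster2Model.concatenate(centerToCorner);
    finalRaster2Model.concatenate(backToBaseLevel);
    finalRaster2Model.concatenate(afterDecimationTranslate);
    finalRaster2Model.concatenate(decimationScale);
    finalRaster2Model.preConcatenate(mosaicWorldToGrid);

    // Pixel (0,0) of the raster that was actually read lands here in the mosaic grid.
    Point2D placed = finalRaster2Model.transform(new Point2D.Double(0, 0), null);
    System.out.println("raster (0,0) -> mosaic " + placed);
  }
}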
예제 #25
0
  /**
   * Takes two {@link Query} objects and produces a new one by mixing the restrictions of both of
   * them.
   *
   * <p>The policy used to mix the query components is the following:
   *
   * <ul>
   *   <li>typeName: type names MUST match (not checked if either or both queries equal <code>
   *       Query.ALL</code>)
   *   <li>handle: you must provide one, since no sensible choice can be made between the handles
   *       of both queries
   *   <li>maxFeatures: the lower of the two maxFeatures values will be used (most restrictive)
   *   <li>attributeNames: the attributes of both queries will be joined into a single set of
   *       attributes. IMPORTANT: only <b><i>explicitly</i></b> requested attributes will be
   *       joined, so if the method <code>retrieveAllProperties()</code> of one of the queries
   *       returns <code>true</code> it does not mean that all the properties will be joined. You
   *       must create the query with the names of the properties you want to load.
   *   <li>filter: the filters of both queries are and'ed
   *   <li>version: if both queries specify a version the versions must match, otherwise the
   *       non-null one is used
   *   <li>startIndex: the start indices of both queries are summed
   *   <li>hints: the hints of both queries are merged
   *   <li><b>any other query property is ignored</b> and no guarantees are made of their return
   *       values, so client code should explicitly take care of them if needed.
   * </ul>
   *
   * <p>A short usage sketch follows the method implementation below.
   *
   * @param firstQuery the first query to mix
   * @param secondQuery the second query to mix
   * @param handle the handle to set on the resulting query
   * @return a new Query combining the restrictions of both input queries
   * @throws IllegalArgumentException if the type names of both queries do not match
   */
  public static Query mixQueries(Query firstQuery, Query secondQuery, String handle) {
    if ((firstQuery == null) && (secondQuery == null)) {
      // throw new NullPointerException("Cannot combine two null queries");
      return Query.ALL;
    }
    if (firstQuery == null || firstQuery.equals(Query.ALL)) {
      return secondQuery;
    } else if (secondQuery == null || secondQuery.equals(Query.ALL)) {
      return firstQuery;
    }
    if ((firstQuery.getTypeName() != null) && (secondQuery.getTypeName() != null)) {
      if (!firstQuery.getTypeName().equals(secondQuery.getTypeName())) {
        String msg =
            "Type names do not match: "
                + firstQuery.getTypeName()
                + " != "
                + secondQuery.getTypeName();
        throw new IllegalArgumentException(msg);
      }
    }

    // mix versions, if possible
    String version;
    if (firstQuery.getVersion() != null) {
      if (secondQuery.getVersion() != null
          && !secondQuery.getVersion().equals(firstQuery.getVersion()))
        throw new IllegalArgumentException("First and second query refer different versions");
      version = firstQuery.getVersion();
    } else {
      version = secondQuery.getVersion();
    }

    // none of the queries equals Query.ALL, mix them
    // use the more restrictive max features field
    int maxFeatures = Math.min(firstQuery.getMaxFeatures(), secondQuery.getMaxFeatures());

    // join attributes names
    String[] propNames =
        joinAttributes(firstQuery.getPropertyNames(), secondQuery.getPropertyNames());

    // join filters
    Filter filter = firstQuery.getFilter();
    Filter filter2 = secondQuery.getFilter();

    if ((filter == null) || filter.equals(Filter.INCLUDE)) {
      filter = filter2;
    } else if ((filter2 != null) && !filter2.equals(Filter.INCLUDE)) {
      filter = ff.and(filter, filter2);
    }
    Integer start = 0;
    if (firstQuery.getStartIndex() != null) {
      start = firstQuery.getStartIndex();
    }
    if (secondQuery.getStartIndex() != null) {
      start += secondQuery.getStartIndex();
    }
    // collect all hints
    Hints hints = new Hints();
    if (firstQuery.getHints() != null) {
      hints.putAll(firstQuery.getHints());
    }
    if (secondQuery.getHints() != null) {
      hints.putAll(secondQuery.getHints());
    }
    // build the mixed query
    String typeName =
        firstQuery.getTypeName() != null ? firstQuery.getTypeName() : secondQuery.getTypeName();

    Query mixed = new Query(typeName, filter, maxFeatures, propNames, handle);
    mixed.setVersion(version);
    mixed.setHints(hints);
    if (start != 0) {
      mixed.setStartIndex(start);
    }
    return mixed;
  }
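A short usage sketch of the mixing policy described above. In GeoTools this method is exposed by DataUtilities; the feature type name, attribute names, and filters below are hypothetical:

import org.geotools.data.DataUtilities;
import org.geotools.data.Query;
import org.geotools.factory.CommonFactoryFinder;
import org.opengis.filter.FilterFactory2;

public class MixQueriesSketch {
  public static void main(String[] args) {
    FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();

    // First query: highways only, at most 100 features, two attributes.
    Query first = new Query("roads", ff.equals(ff.property("type"), ff.literal("highway")));
    first.setMaxFeatures(100);
    first.setPropertyNames(new String[] {"geom", "type"});

    // Second query: more than two lanes, at most 50 features, two attributes.
    Query second = new Query("roads", ff.greater(ff.property("lanes"), ff.literal(2)));
    second.setMaxFeatures(50);
    second.setPropertyNames(new String[] {"geom", "lanes"});

    // The mixed query keeps the common type name, and's the two filters,
    // uses maxFeatures = 50 (the more restrictive) and joins the attribute lists.
    Query mixed = DataUtilities.mixQueries(first, second, "mixedHandle");
    System.out.println(mixed);
  }
}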
예제 #26
0
  @Test
  public void NetCDFTestOn4DcoveragesWithImposedSchemas()
      throws NoSuchAuthorityCodeException, FactoryException, IOException, ParseException {
    File mosaic = new File(TestData.file(this, "."), "NetCDFTestOn4DcoveragesWithImposedSchemas");
    if (mosaic.exists()) {
      FileUtils.deleteDirectory(mosaic);
    }
    assertTrue(mosaic.mkdirs());

    File file = TestData.file(this, "O3NO2-noZ.nc");
    File auxFile = TestData.file(this, "O3NO2-noZ.xml");
    FileUtils.copyFileToDirectory(file, mosaic);
    FileUtils.copyFileToDirectory(auxFile, mosaic);
    file = new File(mosaic, "O3NO2-noZ.nc");

    final Hints hints =
        new Hints(Hints.DEFAULT_COORDINATE_REFERENCE_SYSTEM, CRS.decode("EPSG:4326", true));
    hints.put(
        Utils.AUXILIARY_FILES_PATH,
        new File(mosaic, "O3NO2-noZ.xml").getAbsolutePath()); // impose def

    // Get format
    final AbstractGridFormat format =
        (AbstractGridFormat) GridFormatFinder.findFormat(file.toURI().toURL(), hints);
    final NetCDFReader reader = (NetCDFReader) format.getReader(file.toURI().toURL(), hints);

    assertNotNull(format);
    try {
      String[] names = reader.getGridCoverageNames();
      for (String coverageName : names) {

        final String[] metadataNames = reader.getMetadataNames(coverageName);
        assertNotNull(metadataNames);
        assertEquals(12, metadataNames.length);

        // Parsing metadata values
        assertEquals("true", reader.getMetadataValue(coverageName, "HAS_TIME_DOMAIN"));
        final String timeMetadata = reader.getMetadataValue(coverageName, "TIME_DOMAIN");
        assertNotNull(timeMetadata);
        assertEquals(
            "2012-04-01T00:00:00.000Z/2012-04-01T00:00:00.000Z,2012-04-01T01:00:00.000Z/2012-04-01T01:00:00.000Z",
            timeMetadata);
        assertEquals(
            "2012-04-01T00:00:00.000Z",
            reader.getMetadataValue(coverageName, "TIME_DOMAIN_MINIMUM"));
        assertEquals(
            "2012-04-01T01:00:00.000Z",
            reader.getMetadataValue(coverageName, "TIME_DOMAIN_MAXIMUM"));

        if (coverageName.equalsIgnoreCase("O3")) {
          assertEquals("true", reader.getMetadataValue(coverageName, "HAS_ELEVATION_DOMAIN"));
          final String elevationMetadata =
              reader.getMetadataValue(coverageName, "ELEVATION_DOMAIN");
          assertNotNull(elevationMetadata);
          assertEquals("10.0/10.0,450.0/450.0", elevationMetadata);
          assertEquals(2, elevationMetadata.split(",").length);
          assertEquals("10.0", reader.getMetadataValue(coverageName, "ELEVATION_DOMAIN_MINIMUM"));
          assertEquals("450.0", reader.getMetadataValue(coverageName, "ELEVATION_DOMAIN_MAXIMUM"));
        } else {
          // Note that this sample doesn't have an elevation domain for NO2
          assertEquals("false", reader.getMetadataValue(coverageName, "HAS_ELEVATION_DOMAIN"));
          final String elevationMetadata =
              reader.getMetadataValue(coverageName, "ELEVATION_DOMAIN");
          assertNull(elevationMetadata);
        }

        // subsetting the envelope
        final ParameterValue<GridGeometry2D> gg =
            AbstractGridFormat.READ_GRIDGEOMETRY2D.createValue();
        final GeneralEnvelope originalEnvelope = reader.getOriginalEnvelope(coverageName);
        final GeneralEnvelope reducedEnvelope =
            new GeneralEnvelope(
                new double[] {
                  originalEnvelope.getLowerCorner().getOrdinate(0),
                  originalEnvelope.getLowerCorner().getOrdinate(1)
                },
                new double[] {
                  originalEnvelope.getMedian().getOrdinate(0),
                  originalEnvelope.getMedian().getOrdinate(1)
                });
        reducedEnvelope.setCoordinateReferenceSystem(
            reader.getCoordinateReferenceSystem(coverageName));

        // Selecting bigger gridRange for a zoomed result
        final Dimension dim = new Dimension();
        GridEnvelope gridRange = reader.getOriginalGridRange(coverageName);
        dim.setSize(gridRange.getSpan(0) * 4.0, gridRange.getSpan(1) * 2.0);
        final Rectangle rasterArea = ((GridEnvelope2D) gridRange);
        rasterArea.setSize(dim);
        final GridEnvelope2D range = new GridEnvelope2D(rasterArea);
        gg.setValue(new GridGeometry2D(range, reducedEnvelope));

        final ParameterValue<List> time = ImageMosaicFormat.TIME.createValue();
        final SimpleDateFormat formatD = new SimpleDateFormat("yyyy-MM-dd'T'HH:mm:ss.SSS'Z'");
        formatD.setTimeZone(TimeZone.getTimeZone("GMT"));
        final Date timeD = formatD.parse("2012-04-01T00:00:00.000Z");
        time.setValue(
            new ArrayList() {
              {
                add(timeD);
              }
            });

        final ParameterValue<List> elevation = ImageMosaicFormat.ELEVATION.createValue();
        elevation.setValue(
            new ArrayList() {
              {
                add(450d); // Elevation
              }
            });

        GeneralParameterValue[] values =
            coverageName.equalsIgnoreCase("O3")
                ? new GeneralParameterValue[] {gg, time, elevation}
                : new GeneralParameterValue[] {gg, time};

        GridCoverage2D coverage = reader.read(coverageName, values);
        assertNotNull(coverage);
        if (TestData.isInteractiveTest()) {
          coverage.show();
        } else {
          PlanarImage.wrapRenderedImage(coverage.getRenderedImage()).getTiles();
        }
      }
    } catch (Throwable t) {
      throw new RuntimeException(t);
    } finally {
      if (reader != null) {
        try {
          reader.dispose();
        } catch (Throwable t) {
          // Does nothing
        }
      }
    }
  }