@Override
public ReferencedEnvelope getBounds() {
    try {
        ReferencedEnvelope bounds = featureSource.getBounds();
        if (bounds != null) {
            FeatureType schema = featureSource.getSchema();
            CoordinateReferenceSystem schemaCrs = schema.getCoordinateReferenceSystem();
            CoordinateReferenceSystem boundsCrs = bounds.getCoordinateReferenceSystem();

            if (boundsCrs == null && schemaCrs != null) {
                LOGGER.warning(
                        "Bounds crs not defined; assuming bounds from schema are correct for "
                                + featureSource);
                bounds = new ReferencedEnvelope(
                        bounds.getMinX(),
                        bounds.getMaxX(),
                        bounds.getMinY(),
                        bounds.getMaxY(),
                        schemaCrs);
            }
            if (boundsCrs != null
                    && schemaCrs != null
                    && !CRS.equalsIgnoreMetadata(boundsCrs, schemaCrs)) {
                LOGGER.warning(
                        "Bounds crs and schema crs are not consistent; forcing the use of the schema crs so they are consistent");
                // bounds = bounds.transform(schemaCrs, true );
                bounds = new ReferencedEnvelope(
                        bounds.getMinX(),
                        bounds.getMaxX(),
                        bounds.getMinY(),
                        bounds.getMaxY(),
                        schemaCrs);
            }
            return bounds;
        }
    } catch (IOException e) {
        // feature bounds unavailable
    }

    CoordinateReferenceSystem crs = featureSource.getSchema().getCoordinateReferenceSystem();
    if (crs != null) {
        // returns the envelope based on the CoordinateReferenceSystem
        Envelope envelope = CRS.getEnvelope(crs);
        if (envelope != null) {
            return new ReferencedEnvelope(envelope); // nice!
        } else {
            return new ReferencedEnvelope(crs); // empty bounds
        }
    } else {
        return null; // unknown
    }
}
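/*
 * Usage sketch (not part of the source): a caller could take the bounds returned by the method
 * above and reproject them to a display CRS. The helper name, the "layer" parameter and the
 * EPSG:4326 target are assumptions; ReferencedEnvelope.transform and CRS.decode are standard
 * GeoTools calls.
 */
static ReferencedEnvelope boundsInDisplayCrs(Layer layer)
        throws FactoryException, TransformException {
    ReferencedEnvelope layerBounds = layer.getBounds();
    if (layerBounds == null) {
        return null; // bounds unknown for this layer
    }
    CoordinateReferenceSystem displayCrs = CRS.decode("EPSG:4326");
    // lenient = true tolerates missing Bursa-Wolf parameters during the reprojection
    return layerBounds.transform(displayCrs, true);
}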
private OpenBitSet applySpatialFilter(
        Set<FeatureId> matches,
        Multimap<FeatureId, Integer> docIndexLookup,
        OpenBitSet bits)
        throws IOException {
    JeevesJCS jcs = getJCSCache();
    processCachedFeatures(jcs, matches, docIndexLookup, bits);

    while (!matches.isEmpty()) {
        Id fidFilter;
        if (matches.size() > MAX_FIDS_PER_QUERY) {
            FeatureId[] subset = new FeatureId[MAX_FIDS_PER_QUERY];
            int i = 0;
            Iterator<FeatureId> iter = matches.iterator();
            while (iter.hasNext() && i < MAX_FIDS_PER_QUERY) {
                subset[i] = iter.next();
                iter.remove();
                i++;
            }
            fidFilter = _filterFactory.id(subset);
        } else {
            fidFilter = _filterFactory.id(matches);
            matches = Collections.emptySet();
        }

        FeatureSource<SimpleFeatureType, SimpleFeature> _featureSource = sourceAccessor.one();
        String ftn = _featureSource.getSchema().getName().getLocalPart();
        String[] geomAtt = {_featureSource.getSchema().getGeometryDescriptor().getLocalName()};
        FeatureCollection<SimpleFeatureType, SimpleFeature> features =
                _featureSource.getFeatures(new org.geotools.data.Query(ftn, fidFilter, geomAtt));
        FeatureIterator<SimpleFeature> iterator = features.features();

        try {
            while (iterator.hasNext()) {
                SimpleFeature feature = iterator.next();
                FeatureId featureId = feature.getIdentifier();
                jcs.put(featureId.getID(), feature.getDefaultGeometry());
                if (evaluateFeature(feature)) {
                    for (int doc : docIndexLookup.get(featureId)) {
                        bits.set(doc);
                    }
                }
            }
        } catch (CacheException e) {
            throw new Error(e);
        } finally {
            iterator.close();
        }
    }
    return bits;
}
@Override
protected void setUpInternal(SystemTestData data) throws Exception {
    // run all the tests against a store that can do native paging (h2) and one that
    // can't (property)
    Catalog cat = getCatalog();
    DataStoreInfo ds = cat.getFactory().createDataStore();
    ds.setName("foo");
    ds.setWorkspace(cat.getDefaultWorkspace());

    Map params = ds.getConnectionParameters();
    params.put("dbtype", "h2");
    params.put("database", getTestData().getDataDirectoryRoot().getAbsolutePath());
    cat.add(ds);

    FeatureSource fs1 = getFeatureSource(SystemTestData.FIFTEEN);
    FeatureSource fs2 = getFeatureSource(SystemTestData.SEVEN);

    DataStore store = (DataStore) ds.getDataStore(null);
    SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();

    tb.init((SimpleFeatureType) fs1.getSchema());
    tb.add("num", Integer.class);
    tb.remove("boundedBy");
    store.createSchema(tb.buildFeatureType());

    tb.init((SimpleFeatureType) fs2.getSchema());
    tb.add("num", Integer.class);
    tb.remove("boundedBy");
    store.createSchema(tb.buildFeatureType());

    CatalogBuilder cb = new CatalogBuilder(cat);
    cb.setStore(ds);

    FeatureStore fs = (FeatureStore) store.getFeatureSource("Fifteen");
    addFeatures(fs, fs1.getFeatures());
    FeatureTypeInfo ft = cb.buildFeatureType(fs);
    cat.add(ft);

    fs = (FeatureStore) store.getFeatureSource("Seven");
    addFeatures(fs, fs2.getFeatures());
    ft = cb.buildFeatureType(fs);
    cat.add(ft);
}
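/*
 * Sketch of the addFeatures helper used above (its real implementation is not in this excerpt):
 * it presumably retypes each source feature into the target store's schema, which carries the
 * extra "num" attribute, before inserting. The helper name matches the call sites; the running
 * index used to fill "num" is an assumption.
 */
void addFeatures(FeatureStore fs, FeatureCollection sourceFeatures) throws IOException {
    SimpleFeatureType targetType = (SimpleFeatureType) fs.getSchema();
    SimpleFeatureBuilder builder = new SimpleFeatureBuilder(targetType);
    List<SimpleFeature> retyped = new ArrayList<>();
    int i = 0;
    try (FeatureIterator it = sourceFeatures.features()) {
        while (it.hasNext()) {
            SimpleFeature source = (SimpleFeature) it.next();
            // copy the attributes shared with the target schema
            for (AttributeDescriptor ad : targetType.getAttributeDescriptors()) {
                builder.set(ad.getLocalName(), source.getAttribute(ad.getLocalName()));
            }
            // fill the extra paging attribute with a running index (assumed)
            builder.set("num", i++);
            retyped.add(builder.buildFeature(source.getID()));
        }
    }
    fs.addFeatures(DataUtilities.collection(retyped));
}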
protected org.opengis.filter.Filter createFilter(
        FeatureSource<SimpleFeatureType, SimpleFeature> source) {
    String geomAttName = source.getSchema().getGeometryDescriptor().getLocalName();
    PropertyName geomPropertyName = _filterFactory.property(geomAttName);
    Literal geomExpression = _filterFactory.literal(_geom);
    org.opengis.filter.Filter filter =
            createGeomFilter(_filterFactory, geomPropertyName, geomExpression);
    return filter;
}
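/*
 * Sketch of what a createGeomFilter implementation could look like (the real one is not part of
 * this excerpt): assuming the subclass builds a simple intersects predicate with a FilterFactory2,
 * it might be as short as the following.
 */
protected org.opengis.filter.Filter createGeomFilter(
        FilterFactory2 filterFactory, PropertyName geomPropertyName, Literal geomExpression) {
    // matches features whose geometry attribute intersects the query geometry
    return filterFactory.intersects(geomPropertyName, geomExpression);
}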
public Style style(IProgressMonitor monitor) {
    URL url = parent.getIdentifier();
    File file = URLUtils.urlToFile(url);
    String shp = file.getAbsolutePath();

    StyleFactory styleFactory = CommonFactoryFinder.getStyleFactory(GeoTools.getDefaultHints());

    // strip off the extension and check for sld
    String sld = shp.substring(0, shp.length() - 4) + ".sld"; // $NON-NLS-1$
    File f = new File(sld);
    if (!f.exists()) {
        // try upper case
        sld = shp.substring(0, shp.length() - 4) + ".SLD"; // $NON-NLS-1$
        f = new File(sld);
    }

    if (f.exists()) {
        // parse it up
        SLDParser parser = new SLDParser(styleFactory);
        try {
            parser.setInput(f);
        } catch (FileNotFoundException e) {
            return null; // well that is unexpected since f.exists()
        }
        Style[] styles = parser.readXML();

        FeatureSource<SimpleFeatureType, SimpleFeature> source;
        try {
            source = featureSource(null);
        } catch (IOException e) {
            return null; // does not look like there is anything in the shapefile
        }
        SimpleFeatureType featureType = source.getSchema();

        // put the first one on
        if (styles != null && styles.length > 0) {
            Style style = SLD.matchingStyle(styles, featureType);
            if (style == null) {
                style = styles[0];
            }
            makeGraphicsAbsolute(file, style);
            return style;
        }
    }
    return null; // well nothing worked out; make your own style
}
/** Retrieve information about the feature geometry */
private void setGeometry() {
    GeometryDescriptor geomDesc = featureSource.getSchema().getGeometryDescriptor();
    geometryAttributeName = geomDesc.getLocalName();

    Class<?> clazz = geomDesc.getType().getBinding();

    if (Polygon.class.isAssignableFrom(clazz) || MultiPolygon.class.isAssignableFrom(clazz)) {
        geometryType = GeomType.POLYGON;
    } else if (LineString.class.isAssignableFrom(clazz)
            || MultiLineString.class.isAssignableFrom(clazz)) {
        geometryType = GeomType.LINE;
    } else {
        geometryType = GeomType.POINT;
    }
}
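/*
 * Supporting sketch (an assumption, not shown in the excerpt): GeomType is presumably a small
 * enum used later to pick point, line or fill symbolizers. A minimal definition could be:
 */
private enum GeomType {
    POINT,
    LINE,
    POLYGON
}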
/**
 * Returns the collection of all values of the dimension attribute, possibly sorted if the
 * native capabilities allow for it
 *
 * @param typeInfo
 * @param dimension
 * @return
 * @throws IOException
 */
FeatureCollection getDimensionCollection(FeatureTypeInfo typeInfo, DimensionInfo dimension)
        throws IOException {
    // grab the feature source
    FeatureSource source = null;
    try {
        source = typeInfo.getFeatureSource(null, GeoTools.getDefaultHints());
    } catch (IOException e) {
        throw new ServiceException(
                "Could not get the feature source to list time info for layer "
                        + typeInfo.getPrefixedName(),
                e);
    }

    // build query to grab the dimension values
    final Query dimQuery = new Query(source.getSchema().getName().getLocalPart());
    dimQuery.setPropertyNames(Arrays.asList(dimension.getAttribute()));
    return source.getFeatures(dimQuery);
}
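/*
 * Sketch (an assumption, not part of the source): if the caller wants the values sorted and the
 * store supports native sorting, a SortBy clause could be attached to the same kind of query.
 * The helper name is hypothetical; Query.setSortBy and FilterFactory.sort are standard GeoTools
 * calls, and callers would normally check source.getQueryCapabilities().supportsSorting(...)
 * before relying on the order.
 */
Query buildSortedDimensionQuery(FeatureSource source, DimensionInfo dimension) {
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
    Query dimQuery = new Query(source.getSchema().getName().getLocalPart());
    dimQuery.setPropertyNames(Arrays.asList(dimension.getAttribute()));
    // ask the store to sort on the dimension attribute in ascending order
    dimQuery.setSortBy(new SortBy[] {ff.sort(dimension.getAttribute(), SortOrder.ASCENDING)});
    return dimQuery;
}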
private List<ZGroupLayerPainter> buildLayerPainters(
        Graphics2D graphics,
        StreamingRenderer renderer,
        String layerId,
        ProgressListener cancellationListener)
        throws IOException, FactoryException, NoninvertibleTransformException, SchemaException,
                TransformException {
    List<ZGroupLayerPainter> painters = new ArrayList<>();
    boolean closePainters = true;
    try {
        for (Layer layer : layers) {
            // get the LiteFeatureTypeStyle for this layer
            final FeatureSource featureSource = layer.getFeatureSource();
            if (featureSource == null) {
                throw new IllegalArgumentException("The layer does not contain a feature source");
            }
            final FeatureType schema = featureSource.getSchema();

            final ArrayList<LiteFeatureTypeStyle> lfts =
                    renderer.createLiteFeatureTypeStyles(layer, graphics, false);
            if (lfts.isEmpty()) {
                continue;
            } else {
                if (LOGGER.isLoggable(Level.FINE)) {
                    LOGGER.fine("Processing " + lfts.size() + " stylers for " + schema.getName());
                }
            }

            // get the feature iterator we need
            FeatureCollection features = renderer.getFeatures(layer, schema, lfts);
            // While we could use a non-mark feature iterator for single fts layers,
            // that would cause multiple connections to be open at the same time,
            // which in turn could cause deadlocks against connection pools, so we
            // are going to build a MarkFeatureIterator regardless
            // TODO: we could optimize down to simple streaming painting if we end up
            // with a single painter with a single fts (due to scale dependencies)
            // but we'd have to delay opening the MarkFeatureIterator to recognize the
            // situation
            int maxFeatures = SortedFeatureReader.getMaxFeaturesInMemory(layer.getQuery());
            MarkFeatureIterator fi =
                    MarkFeatureIterator.create(features, maxFeatures, cancellationListener);
            if (fi.hasNext()) {
                ZGroupLayerPainter painter = new ZGroupLayerPainter(fi, lfts, renderer, layerId);
                painters.add(painter);
            } else {
                fi.close();
            }
        }

        // got to the end cleanly, no need to close the painters accumulated so far
        closePainters = false;
    } finally {
        if (closePainters) {
            for (ZGroupLayerPainter painter : painters) {
                try {
                    painter.close();
                } catch (Exception e) {
                    LOGGER.log(Level.FINE, "Failed to close cleanly layer painter " + painter, e);
                }
            }
        }
    }

    validateSortBy(painters);

    return painters;
}
/**
 * Creates a new instance of SchemaDescriptor
 *
 * @param id
 * @param name
 * @param source
 */
public SchemaDescriptor(
        Object id, String name, FeatureSource<SimpleFeatureType, SimpleFeature> source) {
    super(id, name);
    type = source.getSchema();
}
/**
 * Executes the import operation using the parameters that have been specified. Features will be
 * added to the working tree, and a new working tree will be constructed. Either {@code all} or
 * {@code table}, but not both, must be set prior to the import process.
 *
 * @return RevTree the new working tree
 */
@SuppressWarnings("deprecation")
@Override
public RevTree call() {
    // check preconditions and get the actual list of type names to import
    final String[] typeNames = checkPreconditions();

    ProgressListener progressListener = getProgressListener();
    progressListener.started();

    // use a local variable not to alter the command's state
    boolean overwrite = this.overwrite;

    if (alter) {
        overwrite = false;
    }

    final WorkingTree workTree = getWorkTree();

    final boolean destPathProvided = destPath != null;
    if (destPathProvided && overwrite) {
        // we delete the previous tree to honor the overwrite setting, but then turn it
        // to false. Otherwise, each table imported will overwrite the previous ones and
        // only the last one will be imported.
        try {
            workTree.delete(destPath);
        } catch (Exception e) {
            throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_INSERT);
        }
        overwrite = false;
    }

    int tableCount = 0;
    for (String typeName : typeNames) {
        {
            tableCount++;
            String tableName = String.format("%-16s", typeName);
            if (typeName.length() > 16) {
                tableName = tableName.substring(0, 13) + "...";
            }
            progressListener.setDescription(
                    "Importing " + tableName + " (" + tableCount + "/" + typeNames.length + ")... ");
        }

        FeatureSource featureSource = getFeatureSource(typeName);
        SimpleFeatureType featureType = (SimpleFeatureType) featureSource.getSchema();

        final String fidPrefix = featureType.getTypeName() + ".";

        String path;
        if (destPath == null) {
            path = featureType.getTypeName();
        } else {
            NodeRef.checkValidPath(destPath);
            path = destPath;
            featureType = createForceFeatureType(featureType, path);
        }
        featureSource =
                new ForceTypeAndFidFeatureSource<FeatureType, Feature>(
                        featureSource, featureType, fidPrefix);

        ProgressListener taskProgress = subProgress(100.f / typeNames.length);
        if (overwrite) {
            try {
                workTree.delete(path);
                workTree.createTypeTree(path, featureType);
            } catch (Exception e) {
                throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_INSERT);
            }
        }

        if (alter) {
            // first we modify the feature type and the existing features, if needed
            workTree.updateTypeTree(path, featureType);
            Iterator<Feature> transformedIterator = transformFeatures(featureType, path);
            try {
                final Integer collectionSize = collectionSize(featureSource);
                workTree.insert(path, transformedIterator, taskProgress, null, collectionSize);
            } catch (Exception e) {
                // keep the cause, consistent with the other insert failures
                throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_INSERT);
            }
        }

        try {
            insert(workTree, path, featureSource, taskProgress);
        } catch (Exception e) {
            throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_INSERT);
        }
    }

    progressListener.progress(100.f);
    progressListener.complete();

    return workTree.getTree();
}
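/*
 * Sketch of the collectionSize helper referenced above (an assumption, the real implementation
 * is not in this excerpt): it presumably asks the source for a cheap count and reports "unknown"
 * as null when the store cannot answer without a full scan.
 */
private Integer collectionSize(FeatureSource featureSource) {
    try {
        // getCount returns -1 when the store cannot compute the size cheaply
        int count = featureSource.getCount(org.geotools.data.Query.ALL);
        return count == -1 ? null : Integer.valueOf(count);
    } catch (IOException e) {
        return null; // the size is only used for progress reporting, so it is optional
    }
}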