/** Exact equality based on typeNames, namespace, attributes and ancestors */
static boolean equals(SimpleFeatureType typeA, SimpleFeatureType typeB, boolean compareUserMaps) {
    // Identity (including both-null) short-circuits everything else
    if (typeA == typeB) {
        return true;
    }
    // From here on, exactly one side being null means "not equal"
    if (typeA == null || typeB == null) {
        return false;
    }
    // Optionally require the user-data maps to match as well
    boolean userDataMatches =
            !compareUserMaps || equals(typeA.getUserData(), typeB.getUserData());
    return userDataMatches
            && equalsId(typeA, typeB)
            && equals(
                    typeA.getAttributeDescriptors(),
                    typeB.getAttributeDescriptors(),
                    compareUserMaps)
            && equalsAncestors(typeA, typeB);
}
/**
 * Creates a new CSV file whose header row encodes the given feature type.
 *
 * <p>Only a WGS84 point geometry can be represented: it is flattened into the configured
 * latitude/longitude columns; every other non-geometry attribute becomes a regular CSV
 * column, in schema order.
 *
 * @param featureType schema to encode; must carry a WGS84 {@code Point} geometry descriptor
 * @throws IOException if the geometry cannot be mapped to lat/lng columns, or writing fails
 */
@Override
public void createSchema(SimpleFeatureType featureType) throws IOException {
    List<String> header = new ArrayList<String>();
    GeometryDescriptor geometryDescrptor = featureType.getGeometryDescriptor();
    // the geometry is only representable as lat/lng columns when it is a WGS84 point
    if (geometryDescrptor != null
            && CRS.equalsIgnoreMetadata(
                    DefaultGeographicCRS.WGS84,
                    geometryDescrptor.getCoordinateReferenceSystem())
            && geometryDescrptor.getType().getBinding().isAssignableFrom(Point.class)) {
        header.add(this.latField);
        header.add(this.lngField);
    } else {
        throw new IOException(
                "Unable use '"
                        + this.latField
                        + "' / '"
                        + this.lngField
                        + "' to represent "
                        + geometryDescrptor);
    }
    // all non-geometry attributes become plain columns, in schema order
    for (AttributeDescriptor descriptor : featureType.getAttributeDescriptors()) {
        if (descriptor instanceof GeometryDescriptor) continue;
        header.add(descriptor.getLocalName());
    }
    // Write out header, producing an empty file of the correct type
    CsvWriter writer = new CsvWriter(new FileWriter(this.csvFileState.getFile()), ',');
    try {
        writer.writeRecord(header.toArray(new String[header.size()]));
    } finally {
        writer.close();
    }
}
private void collectAttributes( SimpleFeatureType schema, List<String> retainedAttributes, SimpleFeatureTypeBuilder tb) { for (AttributeDescriptor descriptor : schema.getAttributeDescriptors()) { // check whether descriptor has been selected in the attribute list boolean isInRetainList = true; if (retainedAttributes != null) { isInRetainList = retainedAttributes.contains(descriptor.getLocalName()); logger.fine("Checking " + descriptor.getLocalName() + " --> " + isInRetainList); } if (!isInRetainList || schema.getGeometryDescriptor() == descriptor) { continue; } // build the attribute to return AttributeTypeBuilder builder = new AttributeTypeBuilder(); builder.setName(schema.getName().getLocalPart() + "_" + descriptor.getName()); builder.setNillable(descriptor.isNillable()); builder.setBinding(descriptor.getType().getBinding()); builder.setMinOccurs(descriptor.getMinOccurs()); builder.setMaxOccurs(descriptor.getMaxOccurs()); builder.setDefaultValue(descriptor.getDefaultValue()); builder.setCRS(schema.getCoordinateReferenceSystem()); AttributeDescriptor intersectionDescriptor = builder.buildDescriptor( schema.getName().getLocalPart() + "_" + descriptor.getName(), descriptor.getType()); tb.add(intersectionDescriptor); tb.addBinding(descriptor.getType()); } }
/**
 * Post-processes a feature type reverse-engineered from a SpatiaLite table: when a spatial
 * index side table ({@code idx_<table>_<column>}) exists for a geometry column, its name is
 * recorded in the descriptor's user data under {@code SPATIALITE_SPATIAL_INDEX} so later
 * queries can exploit it.
 */
@Override
public void postCreateFeatureType(
        SimpleFeatureType featureType, DatabaseMetaData metadata, String schemaName, Connection cx)
        throws SQLException {
    // figure out if the table has a spatial index and mark the feature type as so
    for (AttributeDescriptor ad : featureType.getAttributeDescriptors()) {
        if (!(ad instanceof GeometryDescriptor)) {
            continue;
        }
        GeometryDescriptor gd = (GeometryDescriptor) ad;
        // SpatiaLite names its index side tables idx_<table>_<geometry column>
        String idxTableName = "idx_" + featureType.getTypeName() + "_" + gd.getLocalName();
        ResultSet rs =
                metadata.getTables(
                        null,
                        dataStore.escapeNamePattern(metadata, schemaName),
                        dataStore.escapeNamePattern(metadata, idxTableName),
                        new String[] {"TABLE"});
        try {
            if (rs.next()) {
                gd.getUserData().put(SPATIALITE_SPATIAL_INDEX, idxTableName);
            }
        } finally {
            dataStore.closeSafe(rs);
        }
    }
}
/**
 * Streams every feature of a shapefile through the given converter, with optional progress
 * and DBF-header debugging output.
 *
 * @param converter receives each converted feature
 * @param dataStore A ShapeFileDataStore containing geometries to convert.
 * @param keyAttributes The names of attributes to be concatenated to generate record keys.
 *     Entries are rewritten in place to the exact letter case used by the shapefile schema.
 * @throws Exception
 */
public static void convertFeatures(
        GeometryStreamConverter converter, ShapefileDataStore dataStore, List<String> keyAttributes)
        throws Exception {
    SimpleFeatureType schema = dataStore.getSchema();
    int numFeatures = dataStore.getCount(Query.ALL);
    FeatureReader<SimpleFeatureType, SimpleFeature> reader = null;
    try {
        List<AttributeDescriptor> attrDesc = schema.getAttributeDescriptors();
        String header = "\"the_geom_id\", \"the_geom_key\"";
        // NOTE(review): loop starts at 1 — presumably attribute 0 is the geometry column,
        // already represented by the two header fields above; confirm against the schema
        for (int i = 1; i < attrDesc.size(); i++) {
            String colName = attrDesc.get(i).getLocalName();
            if (GeometryStreamConverter.debugDBF) header += ", \"" + colName + '"';
            // if any specified attribute matches colName, case insensitive, overwrite specified
            // attribute name with name having correct case
            for (int j = 0; j < keyAttributes.size(); j++)
                if (keyAttributes.get(j).equalsIgnoreCase(colName)) keyAttributes.set(j, colName);
        }
        // debug: read schema and print it out
        if (GeometryStreamConverter.debugDBF) System.out.println(header);
        // loop through features and parse them
        long startTime = System.currentTimeMillis(),
                endTime = startTime,
                debugInterval = 60000,
                nextDebugTime = startTime + debugInterval;
        int featureCount = 0;
        reader = dataStore.getFeatureReader();
        CoordinateReferenceSystem projection = schema.getCoordinateReferenceSystem(); // may be null
        String projectionWKT = projection == null ? null : projection.toWKT();
        while (reader.hasNext()) {
            endTime = System.currentTimeMillis();
            // periodic progress report, throttled to at most once per debugInterval
            if (GeometryStreamConverter.debugTime && endTime > nextDebugTime) {
                System.out.println(
                        String.format(
                                "Processing %s/%s features, %s minutes elapsed",
                                featureCount, numFeatures, (endTime - startTime) / 60000.0));
                while (endTime > nextDebugTime) nextDebugTime += debugInterval;
            }
            convertFeature(converter, reader.next(), keyAttributes, projectionWKT);
            featureCount++;
        }
        if (GeometryStreamConverter.debugTime && endTime - startTime > debugInterval)
            System.out.println(
                    String.format(
                            "Processing %s features completed in %s minutes",
                            numFeatures, (endTime - startTime) / 60000.0));
    } catch (OutOfMemoryError e) {
        // log and rethrow; the caller decides whether recovery is possible
        e.printStackTrace();
        throw e;
    } finally {
        try {
            if (reader != null) reader.close();
        } catch (IOException e) {
            // best-effort close: nothing useful can be done on failure here
        }
    }
}
/**
 * Checks if the given schema contains a GeometryDescriptor that has a generic Geometry type.
 *
 * @param schema
 */
private boolean hasMixedGeometry(SimpleFeatureType schema) {
    // true as soon as any descriptor is a generic-geometry descriptor
    return schema.getAttributeDescriptors().stream().anyMatch(this::isMixedGeometry);
}
/**
 * Rebuilds the given feature type under a new local name (keeping its namespace), carrying
 * over all attributes and the coordinate reference system unchanged.
 */
private SimpleFeatureType createForceFeatureType(SimpleFeatureType featureType, String path) {
    SimpleFeatureTypeBuilder typeBuilder = new SimpleFeatureTypeBuilder();
    typeBuilder.setName(new NameImpl(featureType.getName().getNamespaceURI(), path));
    typeBuilder.setAttributes(featureType.getAttributeDescriptors());
    typeBuilder.setCRS(featureType.getCoordinateReferenceSystem());
    return typeBuilder.buildFeatureType();
}
@Override public void postCreateTable(String schemaName, SimpleFeatureType featureType, Connection cx) throws SQLException, IOException { // create any geometry columns entries after the fact for (AttributeDescriptor ad : featureType.getAttributeDescriptors()) { if (ad instanceof GeometryDescriptor) { GeometryDescriptor gd = (GeometryDescriptor) ad; StringBuffer sql = new StringBuffer("INSERT INTO geometry_columns VALUES ("); // table name sql.append("'").append(featureType.getTypeName()).append("',"); // geometry name sql.append("'").append(gd.getLocalName()).append("',"); // type String gType = Geometries.getForBinding((Class<? extends Geometry>) gd.getType().getBinding()) .getName(); if (gType == null) { throw new IOException("Unknown geometry type: " + gd.getType().getBinding()); } sql.append("'").append(gType).append("',"); // coord dimension sql.append(2).append(","); // srid Integer epsgCode = null; if (gd.getCoordinateReferenceSystem() != null) { CoordinateReferenceSystem crs = gd.getCoordinateReferenceSystem(); try { epsgCode = CRS.lookupEpsgCode(crs, true); } catch (Exception e) { } } if (epsgCode == null) { throw new IOException("Unable to find epsg code code."); } sql.append(epsgCode).append(","); // spatial index enabled sql.append(0).append(")"); LOGGER.fine(sql.toString()); Statement st = cx.createStatement(); try { st.executeUpdate(sql.toString()); } finally { dataStore.closeSafe(st); } } } }
/**
 * Creates a pull parser over a WFS GetFeature response stream.
 *
 * <p>Positions the parser on the root {@code FeatureCollection} element and, when present,
 * reads the {@code numberOfFeatures} attribute.
 *
 * @param getFeatureResponseStream the raw GetFeature response stream
 * @param targetType schema used to build the parsed features
 * @param featureDescriptorName qualified name of the feature elements to parse
 * @param axisOrder axis order to use when decoding geometries
 * @throws IOException wrapping any XML parsing failure on the root element
 */
public XmlSimpleFeatureParser(
        final InputStream getFeatureResponseStream,
        final SimpleFeatureType targetType,
        QName featureDescriptorName,
        String axisOrder)
        throws IOException {
    // this.inputStream = new TeeInputStream(inputStream, System.err);
    this.inputStream = getFeatureResponseStream;
    this.featureNamespace = featureDescriptorName.getNamespaceURI();
    this.featureName = featureDescriptorName.getLocalPart();
    this.targetType = targetType;
    this.builder = new SimpleFeatureBuilder(targetType);
    this.axisOrder = axisOrder;
    XmlPullParserFactory factory;
    try {
        factory = XmlPullParserFactory.newInstance();
        factory.setNamespaceAware(true);
        factory.setValidating(false);
        // parse root element
        parser = factory.newPullParser();
        parser.setInput(inputStream, "UTF-8");
        parser.nextTag();
        parser.require(START_TAG, null, WFS.FeatureCollection.getLocalPart());
        String nof = parser.getAttributeValue(null, "numberOfFeatures");
        if (nof != null) {
            try {
                this.numberOfFeatures = Integer.valueOf(nof);
            } catch (NumberFormatException nfe) {
                // non-numeric attribute: leave numberOfFeatures unset and keep parsing
                LOGGER.warning("Can't parse numberOfFeatures out of " + nof);
            }
        }
    } catch (XmlPullParserException e) {
        throw new DataSourceException(e);
    }
    // HACK! use a case-insensitive map to compare the incoming attribute names with the ones
    // in the schema. Rationale: the FGDC CubeWerx server has a mismatch in the case of
    // property names between what it states in DescribeFeatureType and in GetFeature responses
    expectedProperties = new TreeMap<String, AttributeDescriptor>(String.CASE_INSENSITIVE_ORDER);
    for (AttributeDescriptor desc : targetType.getAttributeDescriptors()) {
        expectedProperties.put(desc.getLocalName(), desc);
    }
}
/**
 * Compare input and output schemas for different case mapping in attribute names.
 *
 * @param destSchema
 * @param schema
 * @return map from a destination attribute name to the matching variant name in
 *     {@code schema}; attributes with an exact match are not included
 */
protected Map<String, String> compareSchemas(
        SimpleFeatureType destSchema, SimpleFeatureType schema) {
    Map<String, String> renames = new HashMap<String, String>();
    for (AttributeDescriptor descriptor : destSchema.getAttributeDescriptors()) {
        String name = descriptor.getLocalName();
        // exact match: nothing to map for this attribute
        if (schema.getDescriptor(name) != null) {
            continue;
        }
        // otherwise try the case variants until one resolves
        for (String candidate : getNameVariants(name)) {
            if (schema.getDescriptor(candidate) != null) {
                renames.put(name, candidate);
                break;
            }
        }
    }
    return renames;
}
/** * Copys a feature type. * * <p>This method does a deep copy in that all individual attributes are copied as well. */ public static SimpleFeatureType copy(SimpleFeatureType original) { SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); // initialize the builder b.init(original); // clear attributes b.attributes().clear(); // add attributes in order for (AttributeDescriptor descriptor : original.getAttributeDescriptors()) { AttributeTypeBuilder ab = new AttributeTypeBuilder(b.factory); ab.init(descriptor); b.add(ab.buildDescriptor(descriptor.getLocalName())); } return b.buildFeatureType(); }
/** Initializes the builder with state from a pre-existing feature type. */
public void init(SimpleFeatureType type) {
    // reset everything to defaults first, then copy over from the template type
    init();
    if (type == null) return;
    uri = type.getName().getNamespaceURI();
    local = type.getName().getLocalPart();
    description = type.getDescription();
    // null the backing list so the accessor lazily re-creates a fresh one before the copy
    restrictions = null;
    restrictions().addAll(type.getRestrictions());
    this.defaultCrs = type.getCoordinateReferenceSystem();
    this.defaultCrsSet = true;
    // same lazy-reset pattern for the attribute list
    attributes = null;
    attributes().addAll(type.getAttributeDescriptors());
    isAbstract = type.isAbstract();
    superType = (SimpleFeatureType) type.getSuper();
}
/**
 * Reads up to {@code numRecords} values of the column {@code rowName}, starting at record
 * {@code offset}, recording the stream position of each record for later seeks.
 *
 * @param offset zero-based record index to start reading at
 * @param numRecords maximum number of records to read, or -1 to read to end of stream
 * @param rowName attribute (column) name whose values are extracted
 * @return parsed column values; an entry is null where the token equals the NULL marker
 * @throws IOException on read failure
 */
@Override
public Object[] parseRecords(final int offset, final int numRecords, final String rowName)
        throws IOException {
    AttributeDescriptor attributeDescriptor = simpleFeatureType.getDescriptor(rowName);
    // an optional leading feature-id token shifts every column one position to the right
    int expectedTokenCount = simpleFeatureType.getAttributeCount();
    expectedTokenCount += hasFeatureId ? 1 : 0;
    int rowIndex = simpleFeatureType.getAttributeDescriptors().indexOf(attributeDescriptor);
    int tokenIndex = rowIndex + (hasFeatureId ? 1 : 0);
    List<Object> values = new ArrayList<>(numRecords);
    skipToLine(offset);
    String line;
    long featureCount = offset;
    while ((numRecords == -1 || featureCount < offset + numRecords)
            && (line = stream.readLine()) != null) {
        String[] tokens = getTokens(line);
        if (tokens == null) {
            break;
        }
        // skip malformed lines that don't have the expected column count
        if (tokens.length != expectedTokenCount) {
            continue;
        }
        featureCount++;
        String token = tokens[tokenIndex];
        try {
            Object value = null;
            if (!VectorDataNodeIO.NULL_TEXT.equals(token)) {
                value = converters[rowIndex].parse(token);
            }
            values.add(value);
        } catch (ConversionException e) {
            // NOTE(review): on conversion failure no value is added for this record, so the
            // returned array can be shorter than the number of records read — confirm this
            // misalignment is intended by callers
            BeamLogManager.getSystemLogger()
                    .warning(String.format("Problem in '%s': %s", csv.getPath(), e.getMessage()));
        }
        // remember the byte position so later reads can seek directly to this record
        bytePositionForOffset.put(featureCount, stream.getStreamPosition());
    }
    return values.toArray();
}
private static List<String> getQueryColumns( final String[] queryProperties, final Filter unsupportedFilter, final SimpleFeatureType fullSchema) throws DataSourceException { final List<String> columnNames = new ArrayList<String>(); if ((queryProperties == null) || (queryProperties.length == 0)) { final List<AttributeDescriptor> attNames = fullSchema.getAttributeDescriptors(); for (Iterator<AttributeDescriptor> it = attNames.iterator(); it.hasNext(); ) { AttributeDescriptor att = it.next(); String attName = att.getLocalName(); // de namespace-ify the names // REVISIT: this shouldnt be needed! if (attName.indexOf(":") != -1) { attName = attName.substring(attName.indexOf(":") + 1); } columnNames.add(attName); } } else { columnNames.addAll(Arrays.asList(queryProperties)); // Ok, say we don't support the full filter natively and it references // some properties, then they have to be retrieved in order to evaluate // the filter at runtime if (unsupportedFilter != null) { final FilterAttributeExtractor attExtractor; attExtractor = new FilterAttributeExtractor(fullSchema); unsupportedFilter.accept(attExtractor, null); final String[] filterAtts = attExtractor.getAttributeNames(); for (String attName : filterAtts) { if (!columnNames.contains(attName)) { columnNames.add(attName); } } } } return columnNames; }
/** @see org.eclipse.ui.views.properties.IPropertySource#getPropertyDescriptors() */
public IPropertyDescriptor[] getPropertyDescriptors() {
    if (descriptors == null) {
        // lazily build one property descriptor per attribute, keyed by attribute index,
        // with a capitalized display name and a geometry/attribute category split
        List<AttributeDescriptor> attrs = type.getAttributeDescriptors();
        descriptors = new IPropertyDescriptor[attrs.size()];
        for (int i = 0; i < attrs.size(); i++) {
            AttributeDescriptor attr = attrs.get(i);
            String name = attr.getLocalName().toLowerCase();
            name = name.substring(0, 1).toUpperCase() + name.substring(1);
            PropertyDescriptor d = new PropertyDescriptor(Integer.valueOf(i), name);
            d.setCategory(
                    attr instanceof GeometryDescriptor
                            ? Messages.ScemaDescriptor_geometry
                            : Messages.ScemaDescriptor_attributeTypes);
            descriptors[i] = d;
        }
    }
    // hand out a defensive copy so callers cannot mutate the cached array
    return descriptors.clone();
}
/**
 * Clones the given schema, changing the geometry attribute to match the given dimensionality.
 *
 * @param schema schema to clone
 * @param dimensionality dimensionality for the geometry 1= points, 2= lines, 3= polygons
 */
private FeatureType cloneWithDimensionality(FeatureType schema, int dimensionality) {
    SimpleFeatureType simpleFt = (SimpleFeatureType) schema;
    SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
    builder.setName(schema.getName());
    builder.setCRS(schema.getCoordinateReferenceSystem());
    for (AttributeDescriptor desc : simpleFt.getAttributeDescriptors()) {
        if (isMixedGeometry(desc)) {
            // replace the generic Geometry attribute with a concrete geometry type of the
            // requested dimensionality, preserving every other property of type/descriptor
            GeometryDescriptor geomDescriptor = (GeometryDescriptor) desc;
            GeometryType geomType = geomDescriptor.getType();
            Class<?> geometryClass = getGeometryForDimensionality(dimensionality);
            GeometryType gt =
                    new GeometryTypeImpl(
                            geomType.getName(),
                            geometryClass,
                            geomType.getCoordinateReferenceSystem(),
                            geomType.isIdentified(),
                            geomType.isAbstract(),
                            geomType.getRestrictions(),
                            geomType.getSuper(),
                            geomType.getDescription());
            builder.add(
                    new GeometryDescriptorImpl(
                            gt,
                            geomDescriptor.getName(),
                            geomDescriptor.getMinOccurs(),
                            geomDescriptor.getMaxOccurs(),
                            geomDescriptor.isNillable(),
                            geomDescriptor.getDefaultValue()));
        } else {
            // non-generic attributes are carried over unchanged
            builder.add(desc);
        }
    }
    schema = builder.buildFeatureType();
    return schema;
}
/** * Create a SimpleFeatureType with the same content; just updating the geometry attribute to match * the provided coordinate reference system. * * @param original SimpleFeatureType * @param crs CoordinateReferenceSystem of result * @return SimpleFeatureType updated with the provided CoordinateReferenceSystem */ public static SimpleFeatureType retype( SimpleFeatureType original, CoordinateReferenceSystem crs) { SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); // initialize the builder b.init(original); // clear the attributes b.attributes().clear(); // add attributes in order for (AttributeDescriptor descriptor : original.getAttributeDescriptors()) { if (descriptor instanceof GeometryDescriptor) { GeometryDescriptor geometryDescriptor = (GeometryDescriptor) descriptor; AttributeTypeBuilder adjust = new AttributeTypeBuilder(b.factory); adjust.init(geometryDescriptor); adjust.setCRS(crs); b.add(adjust.buildDescriptor(geometryDescriptor.getLocalName())); continue; } b.add(descriptor); } return b.buildFeatureType(); }
/** Creates GEOMETRY_COLUMN registrations and spatial indexes for all geometry columns */
@Override
public void postCreateTable(String schemaName, SimpleFeatureType featureType, Connection cx)
        throws SQLException {
    schemaName = schemaName != null ? schemaName : "public";
    String tableName = featureType.getName().getLocalPart();
    Statement st = null;
    try {
        st = cx.createStatement();
        // register all geometry columns in the database
        for (AttributeDescriptor att : featureType.getAttributeDescriptors()) {
            if (att instanceof GeometryDescriptor) {
                GeometryDescriptor gd = (GeometryDescriptor) att;
                // lookup or reverse engineer the srid
                int srid = -1;
                if (gd.getUserData().get(JDBCDataStore.JDBC_NATIVE_SRID) != null) {
                    srid = (Integer) gd.getUserData().get(JDBCDataStore.JDBC_NATIVE_SRID);
                } else if (gd.getCoordinateReferenceSystem() != null) {
                    try {
                        Integer result =
                                CRS.lookupEpsgCode(gd.getCoordinateReferenceSystem(), true);
                        if (result != null) srid = result;
                    } catch (Exception e) {
                        // best-effort lookup: keep srid = -1 and continue
                        LOGGER.log(
                                Level.FINE,
                                "Error looking up the "
                                        + "epsg code for metadata "
                                        + "insertion, assuming -1",
                                e);
                    }
                }
                // assume 2 dimensions, but ease future customisation
                int dimensions = 2;
                // grab the geometry type
                String geomType = CLASS_TO_TYPE_MAP.get(gd.getType().getBinding());
                if (geomType == null) geomType = "GEOMETRY";
                String sql = null;
                if (getVersion(cx).compareTo(V_2_0_0) >= 0) {
                    // postgis 2 and up we don't muck with geometry_columns, we just alter the
                    // type directly to set the geometry type and srid
                    // setup the geometry type
                    sql =
                            "ALTER TABLE \""
                                    + schemaName
                                    + "\".\""
                                    + tableName
                                    + "\" "
                                    + "ALTER COLUMN \""
                                    + gd.getLocalName()
                                    + "\" "
                                    + "TYPE geometry ("
                                    + geomType
                                    + ", "
                                    + srid
                                    + ");";
                    LOGGER.fine(sql);
                    st.execute(sql);
                } else {
                    // register the geometry type, first remove and eventual
                    // leftover, then write out the real one
                    sql =
                            "DELETE FROM GEOMETRY_COLUMNS"
                                    + " WHERE f_table_catalog=''"
                                    + " AND f_table_schema = '" + schemaName + "'"
                                    + " AND f_table_name = '" + tableName + "'"
                                    + " AND f_geometry_column = '" + gd.getLocalName() + "'";
                    LOGGER.fine(sql);
                    st.execute(sql);
                    sql =
                            "INSERT INTO GEOMETRY_COLUMNS VALUES ('',"
                                    + "'" + schemaName + "',"
                                    + "'" + tableName + "',"
                                    + "'" + gd.getLocalName() + "',"
                                    + dimensions + ","
                                    + srid + ","
                                    + "'" + geomType + "')";
                    LOGGER.fine(sql);
                    st.execute(sql);
                }
                // add srid checks (only when a concrete srid was resolved)
                if (srid > -1) {
                    sql =
                            "ALTER TABLE "
                                    + "\"" + schemaName + "\""
                                    + "."
                                    + "\"" + tableName + "\""
                                    + " ADD CONSTRAINT \"enforce_srid_"
                                    + gd.getLocalName() + "\""
                                    + " CHECK (ST_SRID("
                                    + "\"" + gd.getLocalName() + "\""
                                    + ") = " + srid + ")";
                    LOGGER.fine(sql);
                    st.execute(sql);
                }
                // add dimension checks
                sql =
                        "ALTER TABLE "
                                + "\"" + schemaName + "\""
                                + "."
                                + "\"" + tableName + "\""
                                + " ADD CONSTRAINT \"enforce_dims_"
                                + gd.getLocalName() + "\""
                                + " CHECK (st_ndims(\"" + gd.getLocalName() + "\")"
                                + " = 2)";
                LOGGER.fine(sql);
                st.execute(sql);
                // add geometry type checks (skipped for the catch-all GEOMETRY type)
                if (!geomType.equals("GEOMETRY")) {
                    sql =
                            "ALTER TABLE "
                                    + "\"" + schemaName + "\""
                                    + "."
                                    + "\"" + tableName + "\""
                                    + " ADD CONSTRAINT \"enforce_geotype_"
                                    + gd.getLocalName() + "\""
                                    + " CHECK (geometrytype("
                                    + "\"" + gd.getLocalName() + "\""
                                    + ") = '" + geomType + "'::text "
                                    + "OR \"" + gd.getLocalName() + "\""
                                    + " IS NULL)";
                    LOGGER.fine(sql);
                    st.execute(sql);
                }
                // add the spatial index
                sql =
                        "CREATE INDEX \"spatial_" + tableName
                                + "_" + gd.getLocalName().toLowerCase() + "\""
                                + " ON "
                                + "\"" + schemaName + "\""
                                + "."
                                + "\"" + tableName + "\""
                                + " USING GIST ("
                                + "\"" + gd.getLocalName() + "\""
                                + ")";
                LOGGER.fine(sql);
                st.execute(sql);
            }
        }
        if (!cx.getAutoCommit()) {
            cx.commit();
        }
    } finally {
        dataStore.closeSafe(st);
    }
}
/**
 * Computes per-geometry-column bounds via PostGIS {@code ST_Estimated_Extent} (optimizer
 * statistics), avoiding a full table scan.
 *
 * @return one envelope per geometry column, or null when estimation is disabled or fails
 *     (the caller is then expected to fall back on envelope aggregation)
 */
@Override
public List<ReferencedEnvelope> getOptimizedBounds(
        String schema, SimpleFeatureType featureType, Connection cx)
        throws SQLException, IOException {
    if (!estimatedExtentsEnabled) return null;
    String tableName = featureType.getTypeName();
    Statement st = null;
    ResultSet rs = null;
    List<ReferencedEnvelope> result = new ArrayList<ReferencedEnvelope>();
    Savepoint savePoint = null;
    try {
        st = cx.createStatement();
        // inside a transaction a failed query poisons it; a savepoint lets us roll back cleanly
        if (!cx.getAutoCommit()) {
            savePoint = cx.setSavepoint();
        }
        for (AttributeDescriptor att : featureType.getAttributeDescriptors()) {
            if (att instanceof GeometryDescriptor) {
                // use estimated extent (optimizer statistics)
                StringBuffer sql = new StringBuffer();
                sql.append("select ST_AsText(ST_force_2d(ST_Envelope(ST_Estimated_Extent('");
                if (schema != null) {
                    sql.append(schema);
                    sql.append("', '");
                }
                sql.append(tableName);
                sql.append("', '");
                sql.append(att.getName().getLocalPart());
                sql.append("'))))");
                rs = st.executeQuery(sql.toString());
                if (rs.next()) {
                    // decode the geometry
                    Envelope env = decodeGeometryEnvelope(rs, 1, cx);
                    // reproject and merge
                    if (!env.isNull()) {
                        CoordinateReferenceSystem crs =
                                ((GeometryDescriptor) att).getCoordinateReferenceSystem();
                        result.add(new ReferencedEnvelope(env, crs));
                    }
                }
                rs.close();
            }
        }
    } catch (SQLException e) {
        if (savePoint != null) {
            cx.rollback(savePoint);
        }
        // estimation is best-effort: warn and signal the caller to aggregate envelopes instead
        LOGGER.log(
                Level.WARNING,
                "Failed to use ST_Estimated_Extent, falling back on envelope aggregation",
                e);
        return null;
    } finally {
        if (savePoint != null) {
            cx.releaseSavepoint(savePoint);
        }
        dataStore.closeSafe(rs);
        dataStore.closeSafe(st);
    }
    return result;
}
/**
 * Executes the export command using the provided options.
 *
 * <p>Resolves (or derives) the output feature type, creates the target table when missing,
 * optionally truncates it when overwriting, and runs the {@code ExportOp}.
 */
@Override
protected final void runInternal(GeogigCLI cli) throws IOException {
    if (args.isEmpty()) {
        printUsage(cli);
        throw new CommandFailedException();
    }
    // NOTE(review): args.get(1) assumes at least two arguments were supplied; with exactly
    // one this throws IndexOutOfBoundsException rather than a usage error — confirm intended
    String path = args.get(0);
    String tableName = args.get(1);
    checkParameter(tableName != null && !tableName.isEmpty(), "No table name specified");
    DataStore dataStore = getDataStore();
    ObjectId featureTypeId = null;
    if (!Arrays.asList(dataStore.getTypeNames()).contains(tableName)) {
        // target table does not exist yet: resolve the output feature type and create it
        SimpleFeatureType outputFeatureType;
        if (sFeatureTypeId != null) {
            // Check the feature type id string is a correct id
            Optional<ObjectId> id =
                    cli.getGeogig().command(RevParse.class).setRefSpec(sFeatureTypeId).call();
            checkParameter(id.isPresent(), "Invalid feature type reference", sFeatureTypeId);
            TYPE type =
                    cli.getGeogig().command(ResolveObjectType.class).setObjectId(id.get()).call();
            checkParameter(
                    type.equals(TYPE.FEATURETYPE),
                    "Provided reference does not resolve to a feature type: ",
                    sFeatureTypeId);
            outputFeatureType =
                    (SimpleFeatureType)
                            cli.getGeogig()
                                    .command(RevObjectParse.class)
                                    .setObjectId(id.get())
                                    .call(RevFeatureType.class)
                                    .get()
                                    .type();
            featureTypeId = id.get();
        } else {
            // no explicit feature type ref: derive it from the exported path, renamed to
            // the requested table name
            try {
                SimpleFeatureType sft = getFeatureType(path, cli);
                outputFeatureType =
                        new SimpleFeatureTypeImpl(
                                new NameImpl(tableName),
                                sft.getAttributeDescriptors(),
                                sft.getGeometryDescriptor(),
                                sft.isAbstract(),
                                sft.getRestrictions(),
                                sft.getSuper(),
                                sft.getDescription());
            } catch (GeoToolsOpException e) {
                throw new CommandFailedException("No features to export.", e);
            }
        }
        try {
            dataStore.createSchema(outputFeatureType);
        } catch (IOException e) {
            throw new CommandFailedException("Cannot create new table in database", e);
        }
    } else {
        if (!overwrite) {
            throw new CommandFailedException(
                    "The selected table already exists. Use -o to overwrite");
        }
    }
    SimpleFeatureSource featureSource = dataStore.getFeatureSource(tableName);
    if (!(featureSource instanceof SimpleFeatureStore)) {
        throw new CommandFailedException("Can't write to the selected table");
    }
    SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource;
    if (overwrite) {
        // truncate the existing table before re-exporting into it
        try {
            featureStore.removeFeatures(Filter.INCLUDE);
        } catch (IOException e) {
            throw new CommandFailedException("Error truncating table: " + e.getMessage(), e);
        }
    }
    ExportOp op =
            cli.getGeogig()
                    .command(ExportOp.class)
                    .setFeatureStore(featureStore)
                    .setPath(path)
                    .setFilterFeatureTypeId(featureTypeId)
                    .setAlter(alter);
    if (defaultType) {
        op.exportDefaultFeatureType();
    }
    try {
        op.setProgressListener(cli.getProgressListener()).call();
    } catch (IllegalArgumentException iae) {
        throw new org.locationtech.geogig.cli.InvalidParameterException(iae.getMessage(), iae);
    } catch (GeoToolsOpException e) {
        switch (e.statusCode) {
            case MIXED_FEATURE_TYPES:
                throw new CommandFailedException(
                        "The selected tree contains mixed feature types. Use --defaulttype or --featuretype <feature_type_ref> to export.",
                        e);
            default:
                throw new CommandFailedException(
                        "Could not export. Error:" + e.statusCode.name(), e);
        }
    }
    cli.getConsole().println(path + " exported successfully to " + tableName);
}
/* * Alte methode versuch wie shapefile zu importieren. könnte teilweise noch * hilfreich sein <- war doch richtig.. */ public void readFile(String tableName, File file) throws IOException { final InputStream inputstream = new FileInputStream(file); GeometryJSON gjson = new GeometryJSON(15); FeatureJSON fjson = new FeatureJSON(gjson); // System.out.println(fjson.readCRS(inputstream)); FeatureCollection<SimpleFeatureType, SimpleFeature> jfc = fjson.readFeatureCollection(inputstream); final SimpleFeatureType jsonSchema = jfc.getSchema(); final SimpleFeatureTypeBuilder dbSftBuilder = new SimpleFeatureTypeBuilder(); final List<AttributeDescriptor> ads = jsonSchema.getAttributeDescriptors(); for (final AttributeDescriptor ad : ads) { final String n = ad.getName().toString(); final Name name = new NameImpl(n.toUpperCase()); final AttributeDescriptor t = new AttributeDescriptorImpl( ad.getType(), name, ad.getMinOccurs(), ad.getMaxOccurs(), ad.isNillable(), ad.getDefaultValue()); dbSftBuilder.add(t); } // TODO: EPSG!! this.epsg = ""; this.geomType = jsonSchema.getGeometryDescriptor().getType().getBinding().getName(); dbSftBuilder.setName(new NameImpl(tableName)); final SimpleFeatureType dbSchema = dbSftBuilder.buildFeatureType(); List<SimpleFeature> sfList = new ArrayList<SimpleFeature>(); // Getting Geomtries and Attributes right try (FeatureIterator<SimpleFeature> jsonFeatures = jfc.features()) { String bar = null; while (jsonFeatures.hasNext()) { final SimpleFeature sf = jsonFeatures.next(); String foo = sf.getDefaultGeometry().getClass().getName(); // falls verschiedene Geoemtrien im JSON sind nur die erste // Anzeigen, alle anderen ignorieren. 
if (bar != null && !foo.equals(bar)) { System.out.println(foo); continue; } SimpleFeatureBuilder dbSfBuilder = new SimpleFeatureBuilder(dbSchema); for (final AttributeDescriptor ad : jsonSchema.getAttributeDescriptors()) { final String attr = ad.getLocalName(); final String name = attr.toUpperCase(); Object obj = sf.getAttribute(attr); dbSfBuilder.set(name, obj); } final SimpleFeature of = dbSfBuilder.buildFeature(null); sfList.add(of); bar = foo; } } FeatureCollection<SimpleFeatureType, SimpleFeature> collection = new ListFeatureCollection(dbSchema, sfList); dbCollection = collection; }
/**
 * Start editing the provided symbolizer.
 *
 * @param schema feature type used to populate the label-attribute combo (String/Number
 *     attributes only); may be null
 * @param sym text symbolizer whose font/label/colour/placement seed the widgets; may be null
 * @param mode editing mode; {@code Mode.NONE} disables all the controls
 */
public void set(SimpleFeatureType schema, TextSymbolizer sym, Mode mode) {
    // suspend widget listeners while the controls are updated programmatically
    listen(false);
    try {
        this.schema = schema;
        this.enabled = (mode != Mode.NONE && sym != null);
        this.font = SLDs.textFont(sym);
        if (this.font == null || this.font.length == 0) {
            this.font = new FontData[] {new FontData("Arial", 12, SWT.NORMAL)}; // $NON-NLS-1$
        }
        this.labelType = SLDs.textLabelString(sym);
        this.colour = SLDs.textFontFill(sym);
        if (this.colour == null) {
            this.colour = Color.BLACK;
        }
        this.on.setEnabled(mode != Mode.NONE);
        this.fonter.setColorValue(this.colour);
        this.fonter.setFontList(this.font);
        if (schema != null) {
            // only String and Number attributes make sense as label sources
            AttributeDescriptor[] attrs =
                    schema.getAttributeDescriptors().toArray(new AttributeDescriptor[0]);
            List<String> list = new ArrayList<String>();
            for (int i = 0; i < attrs.length; i++) {
                Class cls = attrs[i].getType().getBinding();
                if (String.class.isAssignableFrom(cls)) {
                    list.add(attrs[i].getName().getLocalPart());
                } else if (Number.class.isAssignableFrom(cls)) {
                    list.add(attrs[i].getName().getLocalPart());
                }
            }
            this.field.removeAll();
            this.field.setItems(list.toArray(new String[0]));
            if (this.labelType == null) {
                this.field.select(0);
            } else {
                this.field.setText(this.labelType);
            }
        }
        this.on.setSelection(this.enabled);
        this.field.setEnabled(this.enabled);
        this.fonter.setEnabled(this.enabled);
        // (re)build the placement widgets when switching between point and line geometries
        if (schema != null && (SLD.isLine(schema) == pointPlacement || this.place == null)) {
            pointPlacement = !SLD.isLine(schema);
            if (pointPlacement) {
                initPlacementContentsPoint();
            } else {
                initPlacementContentsLine();
            }
        }
        this.place.setEnabled(this.enabled);
        if (pointPlacement) {
            // PointPlacement
            this.place2.setEnabled(this.enabled);
            this.place3.setEnabled(this.enabled);
            if (this.labelPlacement == null || !(this.labelPlacement instanceof PointPlacement)) {
                // defaults
                if (mode == Mode.POINT) {
                    // don't cover the point!
                    this.place.select(2); // top
                    this.place2.select(2); // right
                } else {
                    this.place.select(1); // middle
                    this.place2.select(1); // center
                }
                this.place3.select(0); // 0 degrees rotation
            } else {
                AnchorPoint anchor = ((PointPlacement) labelPlacement).getAnchorPoint();
                String anchorX = anchor.getAnchorPointX().toString();
                String anchorY = anchor.getAnchorPointY().toString();
                // use labels if 0, 0.5, or 1, otherwise use value for align
                if (anchorX.equals(Double.toString(SLDs.ALIGN_LEFT))) {
                    this.place2.select(0);
                } else if (anchorX.equals(Double.toString(SLDs.ALIGN_CENTER))) {
                    this.place2.select(1);
                } else if (anchorX.equals(Double.toString(SLDs.ALIGN_RIGHT))) {
                    this.place2.select(2);
                } else {
                    this.place2.setText(anchorX);
                }
                if (anchorY.equals(Double.toString(SLDs.ALIGN_BOTTOM))) {
                    this.place.select(0);
                } else if (anchorY.equals(Double.toString(SLDs.ALIGN_MIDDLE))) {
                    this.place.select(1);
                } else if (anchorY.equals(Double.toString(SLDs.ALIGN_TOP))) {
                    this.place.select(2);
                } else {
                    this.place.setText(anchorY);
                }
                // rotation
                this.place3.setText(((PointPlacement) labelPlacement).getRotation().toString());
            }
        } else {
            // LinePlacement
            if (this.labelPlacement == null || !(this.labelPlacement instanceof LinePlacement)) {
                // defaults
                this.place.setText(
                        ProjectPlugin.getPlugin()
                                .getPreferenceStore()
                                .getString(
                                        PreferenceConstants.P_STYLE_DEFAULT_PERPENDICULAR_OFFSET));
            } else {
                String offset =
                        ((LinePlacement) labelPlacement).getPerpendicularOffset().toString();
                this.place.setText(offset);
            }
        }
    } finally {
        // re-enable listeners even if initialization failed part-way
        listen(true);
    }
}
/** * Applies a set of heuristics to find which target attribute corresponds to a certain input * attribute * * @param sourceType * @param targetType * @return */ Map<String, String> buildAttributeMapping( SimpleFeatureType sourceType, SimpleFeatureType targetType) { // look for the typical manglings. For example, if the target is a // shapefile store it will move the geometry and name it the_geom // collect the source names Set<String> sourceNames = new HashSet<String>(); for (AttributeDescriptor sd : sourceType.getAttributeDescriptors()) { sourceNames.add(sd.getLocalName()); } // first check if we have been kissed by sheer luck and the names are // the same Map<String, String> result = new HashMap<String, String>(); for (String name : sourceNames) { if (targetType.getDescriptor(name) != null) { result.put(name, name); } } sourceNames.removeAll(result.keySet()); // then check for simple case difference (Oracle case) for (String name : sourceNames) { for (AttributeDescriptor td : targetType.getAttributeDescriptors()) { if (td.getLocalName().equalsIgnoreCase(name)) { result.put(name, td.getLocalName()); break; } } } sourceNames.removeAll(result.keySet()); // then check attribute names being cut (another Oracle case) for (String name : sourceNames) { String loName = name.toLowerCase(); for (AttributeDescriptor td : targetType.getAttributeDescriptors()) { String tdName = td.getLocalName().toLowerCase(); if (loName.startsWith(tdName)) { result.put(name, td.getLocalName()); break; } } } sourceNames.removeAll(result.keySet()); // consider the shapefile geometry descriptor mangling if (targetType.getGeometryDescriptor() != null && "the_geom".equals(targetType.getGeometryDescriptor().getLocalName()) && !"the_geom".equalsIgnoreCase(sourceType.getGeometryDescriptor().getLocalName())) { result.put(sourceType.getGeometryDescriptor().getLocalName(), "the_geom"); } // and finally we return with as much as we can match if (!sourceNames.isEmpty()) { LOGGER.warning( "Could not 
match the following attributes " + sourceNames + " to the target feature type ones: " + targetType); } return result; }
public List<FeatureId> addFeatures(FeatureCollection<SimpleFeatureType, SimpleFeature> collection) throws IOException { WFSTransactionState ts = null; if (trans == Transaction.AUTO_COMMIT) { ts = new WFSTransactionState(ds); } else { ts = (WFSTransactionState) trans.getState(ds); } List<FeatureId> r = new LinkedList<FeatureId>(); SimpleFeatureType schema = getSchema(); LenientBuilder build = new LenientBuilder(schema); boolean isLenient = true; if (schema.getUserData().containsKey("lenient")) { isLenient = (Boolean) schema.getUserData().get("lenient"); } if (isLenient) { build.setFeatureFactory(new LenientFeatureFactory()); } List<AttributeDescriptor> atrs = schema.getAttributeDescriptors(); FeatureIterator<SimpleFeature> iter = collection.features(); try { ReferencedEnvelope bounds = null; while (iter.hasNext()) { try { SimpleFeature newFeature; try { SimpleFeature f = iter.next(); String nextFid = ts.nextFid(schema.getTypeName()); Object[] values = f.getAttributes().toArray(); build.addAll(values); newFeature = build.buildFeature(nextFid); r.add(newFeature.getIdentifier()); } catch (IllegalAttributeException e) { throw (IOException) new IOException(e.getLocalizedMessage()); } for (int i = 0; i < atrs.size(); i++) { AttributeDescriptor att = atrs.get(i); if (att instanceof GeometryDescriptor) { Object geom = newFeature.getAttribute(i); if (geom instanceof Geometry) { Geometry g = (Geometry) geom; CoordinateReferenceSystem cs = ((GeometryDescriptor) att).getCoordinateReferenceSystem(); if (g == null) continue; if (cs != null && !cs.getIdentifiers().isEmpty()) g.setUserData(cs.getIdentifiers().iterator().next().toString()); if (bounds == null) { bounds = new ReferencedEnvelope( g.getEnvelopeInternal(), schema.getCoordinateReferenceSystem()); } else { bounds.expandToInclude(g.getEnvelopeInternal()); } } } } ts.addAction(schema.getTypeName(), new InsertAction(newFeature)); } catch (NoSuchElementException e) { WFS_1_0_0_DataStore.LOGGER.warning(e.toString()); throw new 
IOException(e.toString()); } } // Fire a notification. // JE if (bounds == null) { // if bounds are null then send an envelope to say that features were added but // at an unknown location. bounds = new ReferencedEnvelope(getSchema().getCoordinateReferenceSystem()); ((WFS_1_0_0_DataStore) getDataStore()) .listenerManager.fireFeaturesRemoved( schema.getTypeName(), getTransaction(), bounds, false); } else { ((WFS_1_0_0_DataStore) getDataStore()) .listenerManager.fireFeaturesRemoved( schema.getTypeName(), getTransaction(), bounds, false); } } finally { iter.close(); } if (trans == Transaction.AUTO_COMMIT) { ts.commit(); String[] fids = ts.getFids(schema.getTypeName()); int i = 0; for (String fid : fids) { FeatureId identifier = r.get(i); if (identifier instanceof FeatureIdImpl) { ((FeatureIdImpl) identifier).setID(fid); } i++; } return r; } return r; }
@Override public void postCreateTable(String schemaName, SimpleFeatureType featureType, Connection cx) throws SQLException { String tableName = featureType.getName().getLocalPart(); String sql; Statement st = null; try { st = cx.createStatement(); // register all geometry columns in the database for (AttributeDescriptor att : featureType.getAttributeDescriptors()) { if (att instanceof GeometryDescriptor) { GeometryDescriptor gd = (GeometryDescriptor) att; // this class is already set right, continue if (gd.getType().getBinding() == Geometry.class) { continue; } // lookup or reverse engineer the srid int srid = -1; if (gd.getUserData().get(JDBCDataStore.JDBC_NATIVE_SRID) != null) { srid = (Integer) gd.getUserData().get(JDBCDataStore.JDBC_NATIVE_SRID); } else if (gd.getCoordinateReferenceSystem() != null) { try { Integer result = CRS.lookupEpsgCode(gd.getCoordinateReferenceSystem(), true); if (result != null) srid = result; } catch (Exception e) { LOGGER.log( Level.FINE, "Error looking up the " + "epsg code for metadata " + "insertion, assuming -1", e); } } // grab the geometry type String geomType = CLASS_TO_TYPE_MAP.get(gd.getType().getBinding()); if (geomType == null) geomType = "GEOMETRY"; // alter the table to use the right type if (schemaName != null) { sql = "ALTER TABLE \"" + schemaName + "\".\"" + tableName + "\" ALTER COLUMN \"" + gd.getLocalName() + "\" " + geomType; } else { sql = "ALTER TABLE \"" + tableName + "\" ALTER COLUMN \"" + gd.getLocalName() + "\" " + geomType; } if (srid != -1) { sql += " SRID " + srid; } st.execute(sql); } } } finally { dataStore.closeSafe(st); } }