/**
   * Forces the specified CRS on geometry attributes (either all of them, or only those missing a
   * CRS, depending on the parameters).
   *
   * @param schema the original schema
   * @param crs the CRS to force
   * @param forceOnlyMissing if true, the specified CRS is forced only on the geometry attributes
   *     that are missing one
   * @return the new feature type, with the CRS forced on its geometry attributes
   * @throws SchemaException if the modified feature type cannot be built
   */
  public static SimpleFeatureType transform(
      SimpleFeatureType schema, CoordinateReferenceSystem crs, boolean forceOnlyMissing)
      throws SchemaException {
    SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();
    tb.setName(schema.getTypeName());
    tb.setNamespaceURI(schema.getName().getNamespaceURI());
    tb.setAbstract(schema.isAbstract());

    for (int i = 0; i < schema.getAttributeCount(); i++) {
      AttributeDescriptor attributeType = schema.getDescriptor(i);
      if (attributeType instanceof GeometryDescriptor) {
        GeometryDescriptor geometryType = (GeometryDescriptor) attributeType;
        tb.descriptor(geometryType);
        if (!forceOnlyMissing || geometryType.getCoordinateReferenceSystem() == null) {
          tb.crs(crs);
        }

        tb.add(geometryType.getLocalName(), geometryType.getType().getBinding());
      } else {
        tb.add(attributeType);
      }
    }
    if (schema.getGeometryDescriptor() != null) {
      tb.setDefaultGeometry(schema.getGeometryDescriptor().getLocalName());
    }

    tb.setSuperType((SimpleFeatureType) schema.getSuper());

    return tb.buildFeatureType();
  }
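
A minimal usage sketch (not part of the original source): forcing WGS84 onto a schema whose geometry attribute has no CRS. The type name, attribute spec and printout are illustrative assumptions; createType and decode are standard GeoTools helpers.

  // Hypothetical usage of the method above.
  public static void forceCrsExample() throws Exception {
    SimpleFeatureType schema = DataUtilities.createType("roads", "geom:LineString,name:String");
    CoordinateReferenceSystem wgs84 = CRS.decode("EPSG:4326");
    // true = only fill in the CRS where it is missing; pass false to overwrite existing ones
    SimpleFeatureType forced = transform(schema, wgs84, true);
    System.out.println(forced.getGeometryDescriptor().getCoordinateReferenceSystem());
  }
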
    public IntersectedFeatureCollection(
        SimpleFeatureCollection delegate,
        List<String> firstAttributes,
        SimpleFeatureCollection features,
        List<String> sndAttributes,
        IntersectionMode intersectionMode,
        boolean percentagesEnabled,
        boolean areasEnabled) {
      super(delegate);
      this.features = features;
      this.firstAttributes = firstAttributes;
      this.sndAttributes = sndAttributes;
      this.intersectionMode = intersectionMode;
      this.percentagesEnabled = percentagesEnabled;
      this.areasEnabled = areasEnabled;
      SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();

      SimpleFeatureType firstFeatureCollectionSchema = delegate.getSchema();
      SimpleFeatureType secondFeatureCollectionSchema = features.getSchema();

      if (intersectionMode == IntersectionMode.FIRST) {
        geomType = firstFeatureCollectionSchema.getGeometryDescriptor();
      }
      if (intersectionMode == IntersectionMode.SECOND) {
        geomType = secondFeatureCollectionSchema.getGeometryDescriptor();
      }
      if (intersectionMode == IntersectionMode.INTERSECTION) {
        geomType = getIntersectionType(delegate, features);
      }
      tb.add(geomType);

      // gather the attributes from the first feature collection, skipping its geometry
      collectAttributes(firstFeatureCollectionSchema, firstAttributes, tb);
      // gather the attributes from the second feature collection
      collectAttributes(secondFeatureCollectionSchema, sndAttributes, tb);
      // add the dynamic attributes as needed
      if (percentagesEnabled) {
        tb.add("percentageA", Double.class);
        tb.add("percentageB", Double.class);
      }
      if (areasEnabled) {
        tb.add("areaA", Double.class);
        tb.add("areaB", Double.class);
      }
      tb.add("INTERSECTION_ID", Integer.class);
      tb.setDescription(firstFeatureCollectionSchema.getDescription());
      tb.setCRS(firstFeatureCollectionSchema.getCoordinateReferenceSystem());
      tb.setAbstract(firstFeatureCollectionSchema.isAbstract());
      tb.setSuperType((SimpleFeatureType) firstFeatureCollectionSchema.getSuper());
      tb.setName(firstFeatureCollectionSchema.getName());

      this.fb = new SimpleFeatureBuilder(tb.buildFeatureType());
    }
 @Override
 public SimpleFeature decode(String recordId, String[] csvRecord) {
   SimpleFeatureType featureType = getFeatureType();
   SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType);
   GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
   GeometryFactory geometryFactory = new GeometryFactory();
   Double lat = null, lng = null;
   String[] headers = csvFileState.getCSVHeaders();
   for (int i = 0; i < headers.length; i++) {
     String header = headers[i];
     if (i < csvRecord.length) {
       String value = csvRecord[i].trim();
       if (geometryDescriptor != null && header.equals(latField)) {
         lat = Double.valueOf(value);
       } else if (geometryDescriptor != null && header.equals(lngField)) {
         lng = Double.valueOf(value);
       } else {
         builder.set(header, value);
       }
     } else {
       builder.set(header, null);
     }
   }
   if (geometryDescriptor != null && lat != null && lng != null) {
     Coordinate coordinate = new Coordinate(lng, lat);
     Point point = geometryFactory.createPoint(coordinate);
     builder.set(geometryDescriptor.getLocalName(), point);
   }
   return builder.buildFeature(csvFileState.getTypeName() + "-" + recordId);
 }
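
For comparison, a minimal hedged sketch of the same record-to-feature mapping done directly with SimpleFeatureBuilder; the type spec, attribute names and values are assumptions, not taken from the CSV store above.

 // Sketch only: build one point feature from an already-parsed CSV row.
 public static SimpleFeature decodeRowExample() throws Exception {
   SimpleFeatureType featureType =
       DataUtilities.createType("locations", "location:Point:srid=4326,CITY:String");
   SimpleFeatureBuilder builder = new SimpleFeatureBuilder(featureType);
   double lat = 46.07, lng = 11.12; // values parsed from the LAT / LON columns
   builder.set("CITY", "Trento");
   builder.set("location", new GeometryFactory().createPoint(new Coordinate(lng, lat)));
   return builder.buildFeature("locations-1");
 }
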
  public void testImportCSV() throws Exception {
    File dir = unpack("csv/locations.zip");
    ImportContext context = importer.createContext(new SpatialFile(new File(dir, "locations.csv")));
    assertEquals(1, context.getTasks().size());

    ImportTask task = context.getTasks().get(0);
    assertEquals(ImportTask.State.NO_CRS, task.getState());

    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    resource.setSRS("EPSG:4326");

    assertTrue("Item not ready", importer.prep(task));
    assertEquals(ImportTask.State.READY, task.getState());

    context.updated();
    assertEquals(ImportContext.State.PENDING, context.getState());
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    SimpleFeatureType featureType = (SimpleFeatureType) fti.getFeatureType();
    GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
    assertNull("Expecting no geometry", geometryDescriptor);
    assertEquals(4, featureType.getAttributeCount());
  }
    private void collectAttributes(
        SimpleFeatureType schema, List<String> retainedAttributes, SimpleFeatureTypeBuilder tb) {
      for (AttributeDescriptor descriptor : schema.getAttributeDescriptors()) {
        // check whether descriptor has been selected in the attribute list
        boolean isInRetainList = true;
        if (retainedAttributes != null) {

          isInRetainList = retainedAttributes.contains(descriptor.getLocalName());
          logger.fine("Checking " + descriptor.getLocalName() + " --> " + isInRetainList);
        }
        if (!isInRetainList || schema.getGeometryDescriptor() == descriptor) {
          continue;
        }

        // build the attribute to return
        AttributeTypeBuilder builder = new AttributeTypeBuilder();
        builder.setName(schema.getName().getLocalPart() + "_" + descriptor.getName());
        builder.setNillable(descriptor.isNillable());
        builder.setBinding(descriptor.getType().getBinding());
        builder.setMinOccurs(descriptor.getMinOccurs());
        builder.setMaxOccurs(descriptor.getMaxOccurs());
        builder.setDefaultValue(descriptor.getDefaultValue());
        builder.setCRS(schema.getCoordinateReferenceSystem());
        AttributeDescriptor intersectionDescriptor =
            builder.buildDescriptor(
                schema.getName().getLocalPart() + "_" + descriptor.getName(), descriptor.getType());
        tb.add(intersectionDescriptor);
        tb.addBinding(descriptor.getType());
      }
    }
  /**
   * Creates a Query to be executed over a registered ArcSDE layer (whether it is from a table or a
   * spatial view).
   *
   * @param session the session the query works over. As it is managed by the calling code, it is
   *     the calling code's responsibility to close it when done.
   * @param fullSchema the full schema of the type being queried
   * @param query the GeoTools query to run against the layer
   * @param fidReader the strategy used to read feature ids for the type
   * @param versioningHandler the handler for versioned tables; when the table is multiversioned,
   *     the default version and current state will be used for the SeQuery
   * @return the newly created ArcSDEQuery
   * @throws IOException if a problem occurs talking to the ArcSDE server
   */
  public static ArcSDEQuery createQuery(
      final ISession session,
      final SimpleFeatureType fullSchema,
      final Query query,
      final FIDReader fidReader,
      final ArcSdeVersionHandler versioningHandler)
      throws IOException {

    Filter filter = query.getFilter();

    LOGGER.fine("Creating new ArcSDEQuery");

    final String typeName = fullSchema.getTypeName();
    final SeTable sdeTable = session.getTable(typeName);
    final SeLayer sdeLayer;
    if (fullSchema.getGeometryDescriptor() == null) {
      sdeLayer = null;
    } else {
      sdeLayer = session.getLayer(typeName);
    }
    // create the set of filters to work over
    final ArcSDEQuery.FilterSet filters =
        new ArcSDEQuery.FilterSet(sdeTable, sdeLayer, filter, fullSchema, null, null, fidReader);

    final Filter unsupportedFilter = filters.getUnsupportedFilter();
    final String[] queryProperties = query.getPropertyNames();
    final SimpleFeatureType querySchema =
        getQuerySchema(queryProperties, unsupportedFilter, fullSchema);

    final String sortByClause = buildSortByClause(fullSchema, query.getSortBy(), fidReader);
    final ArcSDEQuery sdeQuery;
    sdeQuery =
        new ArcSDEQuery(session, querySchema, filters, sortByClause, fidReader, versioningHandler);
    return sdeQuery;
  }
  @Override
  public void createSchema(SimpleFeatureType featureType) throws IOException {
    List<String> header = new ArrayList<String>();

    GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
    if (geometryDescriptor != null
        && CRS.equalsIgnoreMetadata(
            DefaultGeographicCRS.WGS84, geometryDescriptor.getCoordinateReferenceSystem())
        && geometryDescriptor.getType().getBinding().isAssignableFrom(Point.class)) {
      header.add(this.latField);
      header.add(this.lngField);
    } else {
      throw new IOException(
          "Unable to use '"
              + this.latField
              + "' / '"
              + this.lngField
              + "' to represent "
              + geometryDescriptor);
    }
    }
    for (AttributeDescriptor descriptor : featureType.getAttributeDescriptors()) {
      if (descriptor instanceof GeometryDescriptor) continue;
      header.add(descriptor.getLocalName());
    }
    // Write out header, producing an empty file of the correct type
    CsvWriter writer = new CsvWriter(new FileWriter(this.csvFileState.getFile()), ',');
    try {
      writer.writeRecord(header.toArray(new String[header.size()]));
    } finally {
      writer.close();
    }
  }
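
A hedged usage sketch for the method above: the schema below satisfies the WGS84 + Point checks, so the header is written with the lat/lng columns first. "csvDataStore" and the attribute names are assumptions.

  SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();
  tb.setName("locations");
  tb.setCRS(DefaultGeographicCRS.WGS84);
  tb.add("geom", Point.class); // becomes the LAT / LON header columns
  tb.add("CITY", String.class); // copied to the header as-is
  // csvDataStore is assumed to be an instance of the store that owns createSchema(...)
  csvDataStore.createSchema(tb.buildFeatureType());
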
  /**
   * Creates the resultant features from the split geometries.
   *
   * @param splitGeometries list with the new geometries
   * @param feature the original feature the attributes are copied from
   * @return the list of features built from the split geometries
   * @throws OperationNotFoundException if no transform between the involved CRSs can be found
   * @throws TransformException if a geometry cannot be reprojected
   */
  private List<SimpleFeature> createSplitFeatures(
      final List<Geometry> splitGeometries, final SimpleFeature feature)
      throws OperationNotFoundException, TransformException {

    final SimpleFeatureType featureType = feature.getFeatureType();
    final CoordinateReferenceSystem featureCrs = featureType.getCoordinateReferenceSystem();

    Class<? extends Geometry> geometryType =
        (Class<? extends Geometry>) featureType.getGeometryDescriptor().getType().getBinding();

    List<SimpleFeature> splitFeatureList = new LinkedList<SimpleFeature>();
    for (Geometry splittedPart : splitGeometries) {

      splittedPart = GeoToolsUtils.reproject(splittedPart, desiredCRS, featureCrs);

      splittedPart = GeometryUtil.adapt(splittedPart, geometryType);
      SimpleFeature newFeature = DataUtilities.template(featureType);
      GeoToolsUtils.copyAttributes(feature, newFeature);
      newFeature.setDefaultGeometry(splittedPart);

      splitFeatureList.add(newFeature);
    }

    return splitFeatureList;
  }
 @Override
 public void postCreateFeatureType(
     SimpleFeatureType featureType, DatabaseMetaData metadata, String schemaName, Connection cx)
     throws SQLException {
   // figure out if the table has a spatial index and mark the feature type as so
   if (featureType.getGeometryDescriptor() == null) {
     return;
   }
   String idxTableName = featureType.getTypeName() + "_HATBOX";
   ResultSet rs = metadata.getTables(null, schemaName, idxTableName, new String[] {"TABLE"});
   try {
     if (rs.next()) {
       featureType.getGeometryDescriptor().getUserData().put(H2_SPATIAL_INDEX, idxTableName);
     }
   } finally {
     dataStore.closeSafe(rs);
   }
 }
 /** Maps the default geometry attribute regardless of whether the source and target geometry types match. */
 @SuppressWarnings("unchecked")
 private static void mapGeometryAttributes(
     SimpleFeatureType sourceSchema,
     SimpleFeatureType targetSchema,
     Map<String, String> queryAttributes) {
   // Now we'll match the geometry on type only. I don't care if it has the same type name.
   GeometryDescriptor defaultGeometry = targetSchema.getGeometryDescriptor();
   if (defaultGeometry == null) {
     return;
    } else if (!queryAttributes.containsKey(defaultGeometry.getName().getLocalPart())) {
     // first check source's default geom and see if it matches
     Class<?> binding = sourceSchema.getGeometryDescriptor().getType().getBinding();
     if (defaultGeometry.getType().getBinding().isAssignableFrom(binding)) {
       queryAttributes.put(
           defaultGeometry.getName().getLocalPart(),
           sourceSchema.getGeometryDescriptor().getName().getLocalPart());
     } else {
       // we have to look through all the source attributes looking for a geometry that
       // matches.
       boolean found = false;
       for (int i = 0; i < sourceSchema.getAttributeCount(); i++) {
         AttributeDescriptor source = sourceSchema.getDescriptor(i);
         if (defaultGeometry
             .getType()
             .getBinding()
             .isAssignableFrom(source.getType().getBinding())) {
           queryAttributes.put(
               defaultGeometry.getName().getLocalPart(), source.getName().getLocalPart());
           found = true;
           break;
         }
       }
       // ok so we're going to have to do some transformations. Match default geometries
       // then.
       if (!found) {
         queryAttributes.put(
             defaultGeometry.getName().getLocalPart(),
             sourceSchema.getGeometryDescriptor().getName().getLocalPart());
       }
     }
   }
 }
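
A small hedged sketch of how the helper above is typically driven; sourceSchema and targetSchema are assumed to be in scope.

   // Sketch only: collect the target -> source geometry mapping before retyping features.
   Map<String, String> queryAttributes = new HashMap<>();
   mapGeometryAttributes(sourceSchema, targetSchema, queryAttributes);
   // e.g. {"the_geom" -> "geom"} when the target default geometry can accept the source geometry
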
    public IntersectedFeatureIterator(
        SimpleFeatureIterator delegate,
        SimpleFeatureCollection firstFeatures,
        SimpleFeatureCollection secondFeatures,
        SimpleFeatureType firstFeatureCollectionSchema,
        SimpleFeatureType secondFeatureCollectionSchema,
        List<String> retainAttributesFstPar,
        List<String> retainAttributesSndPar,
        IntersectionMode intersectionMode,
        boolean percentagesEnabled,
        boolean areasEnabled,
        SimpleFeatureBuilder sfb) {
      this.retainAttributesFst = retainAttributesFstPar;
      this.retainAttributesSnd = retainAttributesSndPar;
      this.delegate = delegate;
      this.firstFeatures = firstFeatures;
      this.secondFeatures = secondFeatures;
      this.percentagesEnabled = percentagesEnabled;
      this.areasEnabled = areasEnabled;
      this.intersectionMode = intersectionMode;

      logger.fine("Creating schema");
      // create the geometry attribute descriptor for the result
      //          SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();
      if (intersectionMode == IntersectionMode.FIRST) {
        geomType = firstFeatureCollectionSchema.getGeometryDescriptor();
      }
      if (intersectionMode == IntersectionMode.SECOND) {
        geomType = secondFeatureCollectionSchema.getGeometryDescriptor();
      }
      if (intersectionMode == IntersectionMode.INTERSECTION) {
        geomType = getIntersectionType(firstFeatures, secondFeatures);
      }

      this.fb = sfb;
      subFeatureCollection = this.secondFeatures;

      this.dataGeomName = this.firstFeatures.getSchema().getGeometryDescriptor().getLocalName();
      logger.fine("Schema created");
    }
  public void testImportCSVIndirect() throws Exception {
    File dir = unpack("csv/locations.zip");
    String wsName = getCatalog().getDefaultWorkspace().getName();

    DataStoreInfo h2DataStore = createH2DataStore(wsName, "csvindirecttest");
    SpatialFile importData = new SpatialFile(new File(dir, "locations.csv"));

    ImportContext context = importer.createContext(importData, h2DataStore);
    assertEquals(1, context.getTasks().size());
    ImportTask task = context.getTasks().get(0);

    TransformChain transformChain = task.getTransform();
    transformChain.add(new AttributesToPointGeometryTransform("LAT", "LON"));
    assertEquals(ImportTask.State.NO_CRS, task.getState());

    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    resource.setSRS("EPSG:4326");

    assertTrue("Item not ready", importer.prep(task));
    assertEquals(ImportTask.State.READY, task.getState());

    context.updated();
    assertEquals(ImportContext.State.PENDING, context.getState());
    importer.run(context);

    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    SimpleFeatureType featureType = (SimpleFeatureType) fti.getFeatureType();
    GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
    assertNotNull("Expecting geometry", geometryDescriptor);
    assertEquals("Invalid geometry name", "location", geometryDescriptor.getLocalName());
    assertEquals(3, featureType.getAttributeCount());
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
        fti.getFeatureSource(null, null);
    FeatureCollection<? extends FeatureType, ? extends Feature> features =
        featureSource.getFeatures();
    assertEquals(9, features.size());
    FeatureIterator<? extends Feature> featureIterator = features.features();
    assertTrue("Expected features", featureIterator.hasNext());
    SimpleFeature feature = (SimpleFeature) featureIterator.next();
    assertNotNull(feature);
    assertEquals("Invalid city attribute", "Trento", feature.getAttribute("CITY"));
    assertEquals("Invalid number attribute", 140, feature.getAttribute("NUMBER"));
    Object geomAttribute = feature.getAttribute("location");
    assertNotNull("Expected geometry", geomAttribute);
    Point point = (Point) geomAttribute;
    Coordinate coordinate = point.getCoordinate();
    assertEquals("Invalid x coordinate", 11.12, coordinate.x, 0.1);
    assertEquals("Invalid y coordinate", 46.07, coordinate.y, 0.1);
    featureIterator.close();
  }
  /**
   * Applies the transform to the feature's default geometry attribute.
   *
   * @param feature Feature to be transformed
   * @param schema Schema for target transformation - transform( schema, crs )
   * @param transform MathTransform used to transform coordinates - reproject( crs, crs )
   * @return transformed Feature of type schema
   * @throws TransformException
   * @throws MismatchedDimensionException
   * @throws IllegalAttributeException
   */
  public static SimpleFeature transform(
      SimpleFeature feature, SimpleFeatureType schema, MathTransform transform)
      throws MismatchedDimensionException, TransformException, IllegalAttributeException {
    feature = SimpleFeatureBuilder.copy(feature);

    GeometryDescriptor geomType = schema.getGeometryDescriptor();
    Geometry geom = (Geometry) feature.getAttribute(geomType.getLocalName());

    geom = JTS.transform(geom, transform);

    feature.setAttribute(geomType.getLocalName(), geom);

    return feature;
  }
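
A hedged usage sketch: reprojecting a single feature from EPSG:4326 to EPSG:3857 with the method above. The type spec, coordinates and feature id are illustrative only.

  public static SimpleFeature reprojectExample() throws Exception {
    SimpleFeatureType schema = DataUtilities.createType("poi", "geom:Point:srid=4326,name:String");
    SimpleFeature feature =
        SimpleFeatureBuilder.build(
            schema,
            new Object[] {new GeometryFactory().createPoint(new Coordinate(11.12, 46.07)), "Trento"},
            "poi.1");
    MathTransform tx =
        CRS.findMathTransform(CRS.decode("EPSG:4326"), CRS.decode("EPSG:3857"), true);
    // the source schema is reused here since only the geometry attribute name is read from it
    return transform(feature, schema, tx);
  }
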
  /**
   * Records this placemark descriptor's class name in the feature type's user data, along with the
   * name of the default geometry attribute when one is present.
   */
  @Override
  public void setUserDataOf(SimpleFeatureType compatibleFeatureType) {

    compatibleFeatureType
        .getUserData()
        .put(PROPERTY_NAME_PLACEMARK_DESCRIPTOR, getClass().getName());

    final org.opengis.feature.type.GeometryDescriptor geometryDescriptor =
        compatibleFeatureType.getGeometryDescriptor();
    if (geometryDescriptor != null) {
      compatibleFeatureType
          .getUserData()
          .put(PROPERTY_NAME_DEFAULT_GEOMETRY, geometryDescriptor.getLocalName());
    }
  }
  /**
   * If the query has been parsed as just a where clause filter, or has no filter at all, the
   * result count calculation is optimized by selecting a single <code>count()</code> row. If the
   * filter involves any kind of spatial filter, such as BBOX, the calculation can't be optimized
   * this way, because the ArcSDE Java API throws a <code>"DATABASE LEVEL
   * ERROR OCURRED"</code> exception. In that case, a query over the shape field is made and the
   * result is traversed, counting the number of rows inside a while loop.
   */
  public int calculateResultCount() throws IOException {

    final SimpleFeatureType schema = this.schema;
    final GeometryDescriptor geometryDescriptor = schema.getGeometryDescriptor();

    final String colName;
    if (geometryDescriptor == null) {
      // geometryless type, use any other column for the query
      colName = schema.getDescriptor(0).getLocalName();
    } else {
      colName = geometryDescriptor.getLocalName();
    }
    final SeQueryInfo qInfo = filters.getQueryInfo(new String[] {colName});

    final SeFilter[] spatialFilters = filters.getSpatialFilters();

    final Command<Integer> countCmd =
        new Command<Integer>() {
          @Override
          public Integer execute(ISession session, SeConnection connection)
              throws SeException, IOException {

            SeQuery query = new SeQuery(connection);
            try {
              versioningHandler.setUpStream(session, query);

              if (spatialFilters != null && spatialFilters.length > 0) {
                query.setSpatialConstraints(SeQuery.SE_OPTIMIZE, true, spatialFilters);
              }

              SeTable.SeTableStats tableStats =
                  query.calculateTableStatistics(
                      "*", SeTable.SeTableStats.SE_COUNT_STATS, qInfo, 0);

              int actualCount = tableStats.getCount();
              return Integer.valueOf(actualCount);
            } finally {
              query.close();
            }
          }
        };

    final Integer count = session.issue(countCmd);
    return count.intValue();
  }
  /** Convenience method to calculate the resulting bounding box of a given query. */
  public static Envelope calculateQueryExtent(
      final ISession session,
      final FeatureTypeInfo typeInfo,
      final Query query,
      final ArcSdeVersionHandler versioningHandler)
      throws IOException {

    final SimpleFeatureType fullSchema = typeInfo.getFeatureType();
    final GeometryDescriptor geometryDescriptor = fullSchema.getGeometryDescriptor();
    if (geometryDescriptor == null) {
      return null;
    }
    final String defaultGeomAttName = geometryDescriptor.getLocalName();

    // we're calculating the bounds, so make sure the spatial column is part of
    // the query's property names
    final Query realQuery = new Query(query);
    realQuery.setPropertyNames(new String[] {defaultGeomAttName});

    final ArcSDEQuery boundsQuery;

    if (typeInfo.isInProcessView()) {
      final SeQueryInfo definitionQuery = typeInfo.getSdeDefinitionQuery();
      final PlainSelect viewSelectStatement = typeInfo.getDefinitionQuery();
      boundsQuery =
          createInprocessViewQuery(
              session, fullSchema, realQuery, definitionQuery, viewSelectStatement);
    } else {
      final FIDReader fidStrategy = typeInfo.getFidStrategy();
      boundsQuery = createQuery(session, fullSchema, realQuery, fidStrategy, versioningHandler);
    }

    Envelope queryExtent = null;
    try {
      Filter unsupportedFilter = boundsQuery.getFilters().getUnsupportedFilter();
      if (unsupportedFilter == Filter.INCLUDE) {
        // we can only use an optimized bounds calculation if the
        // query is fully supported by sde
        queryExtent = boundsQuery.calculateQueryExtent();
      }
    } finally {
      boundsQuery.close();
    }
    return queryExtent;
  }
 private void checkSchemaCorrect(SimpleFeatureType ft, boolean includeProportionColumns) {
   if (includeProportionColumns) {
     assertEquals(5, ft.getAttributeCount());
   } else {
     assertEquals(3, ft.getAttributeCount());
   }
   assertEquals(Point.class, ft.getGeometryDescriptor().getType().getBinding());
   assertEquals(
       Integer.class, ft.getDescriptor(PointStackerProcess.ATTR_COUNT).getType().getBinding());
   assertEquals(
       Integer.class,
       ft.getDescriptor(PointStackerProcess.ATTR_COUNT_UNIQUE).getType().getBinding());
   if (includeProportionColumns) {
     assertEquals(
         Double.class,
         ft.getDescriptor(PointStackerProcess.ATTR_NORM_COUNT).getType().getBinding());
     assertEquals(
         Double.class,
         ft.getDescriptor(PointStackerProcess.ATTR_NORM_COUNT_UNIQUE).getType().getBinding());
   }
 }
  public static SimpleFeatureType retype(SimpleFeatureType original, List<String> types) {
    SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder();

    // initialize the builder
    b.init(original);

    // clear the attributes
    b.attributes().clear();

    // add attributes in order
    for (int i = 0; i < types.size(); i++) {
      b.add(original.getDescriptor(types.get(i)));
    }

    // handle default geometry
    GeometryDescriptor defaultGeometry = original.getGeometryDescriptor();
    if (defaultGeometry != null && types.contains(defaultGeometry.getLocalName())) {
      b.setDefaultGeometry(defaultGeometry.getLocalName());
    }

    return b.buildFeatureType();
  }
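
A hedged usage sketch for retype: trimming a schema down to two attributes while keeping the default geometry. The type spec is made up for illustration.

  public static void retypeExample() throws Exception {
    SimpleFeatureType original =
        DataUtilities.createType("roads", "the_geom:LineString,name:String,speedLimit:Integer");
    SimpleFeatureType trimmed = retype(original, Arrays.asList("the_geom", "name"));
    System.out.println(trimmed.getAttributeCount()); // 2, with the default geometry preserved
  }
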
  private Query reprojectFilter(Query query) throws IOException {
    final Filter originalFilter = query.getFilter() != null ? query.getFilter() : Filter.INCLUDE;
    if (Filter.INCLUDE.equals(originalFilter)) {
      return query;
    }

    final SimpleFeatureType nativeFeatureType = getSchema();
    final GeometryDescriptor geom = nativeFeatureType.getGeometryDescriptor();
    // if no geometry involved, no reprojection needed
    if (geom == null) {
      return query;
    }

    final FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2(null);

    try {
      CoordinateReferenceSystem nativeCRS = geom.getCoordinateReferenceSystem();

      // now we apply a default to all geometries and bbox in the filter
      DefaultCRSFilterVisitor defaultCRSVisitor = new DefaultCRSFilterVisitor(ff, nativeCRS);
      final Filter defaultedFilter = (Filter) originalFilter.accept(defaultCRSVisitor, null);

      // and then we reproject all geometries so that the datastore receives them
      // in the native projection system (or the forced one, when a CRS is forced)
      ReprojectingFilterVisitor reprojectingVisitor =
          new ReprojectingFilterVisitor(ff, nativeFeatureType);
      final Filter reprojectedFilter = (Filter) defaultedFilter.accept(reprojectingVisitor, null);

      Query reprojectedQuery = new Query(query);
      reprojectedQuery.setFilter(reprojectedFilter);
      return reprojectedQuery;
    } catch (Exception e) {
      throw new DataSourceException("Had troubles handling filter reprojection...", e);
    }
  }
  // FIXME copy/paste from FeatureDataAdapter ... abstract!
  @Override
  protected List<IndexFieldHandler<SimpleFeature, ? extends CommonIndexValue, Object>>
      getDefaultTypeMatchingHandlers(final Object typeObj) {
    if ((typeObj != null) && (typeObj instanceof SimpleFeatureType)) {
      final SimpleFeatureType internalType = (SimpleFeatureType) typeObj;

      nativeFieldHandlers = typeToFieldHandlers((SimpleFeatureType) typeObj);
      final List<IndexFieldHandler<SimpleFeature, ? extends CommonIndexValue, Object>>
          defaultHandlers =
              new ArrayList<IndexFieldHandler<SimpleFeature, ? extends CommonIndexValue, Object>>();
      final IndexFieldHandler<SimpleFeature, Time, Object> timeHandler =
          getTimeRangeHandler(internalType);
      if (timeHandler != null) {
        defaultHandlers.add(timeHandler);
      }

      defaultHandlers.add(
          new FeatureGeometryHandler(internalType.getGeometryDescriptor(), fieldVisiblityHandler));
      return defaultHandlers;
    }
    // LOGGER.warn("Simple Feature Type could not be used for handling the indexed data");
    return super.getDefaultTypeMatchingHandlers(featureType);
  }
  /**
   * Writing test that only engages against a remote geoserver.
   *
   * <p>Makes reference to the standard featureTypes that geoserver ships with. NOTE: Ignoring this
   * test for now because it edits topp:states and GeoServer doesn't return the correct Feature IDs
   * on transactions against shapefiles
   */
  @Test
  @Ignore
  public void testWrite()
      throws NoSuchElementException, IllegalFilterException, IOException,
          IllegalAttributeException {
    if (url == null) return;

    Map m = new HashMap();
    m.put(WFSDataStoreFactory.URL.key, url);
    m.put(WFSDataStoreFactory.TIMEOUT.key, Integer.valueOf(10000000));
    DataStore post = (WFS_1_0_0_DataStore) (new WFSDataStoreFactory()).createDataStore(m);
    String typename = TO_EDIT_TYPE;
    SimpleFeatureType ft = post.getSchema(typename);
    SimpleFeatureSource fs = post.getFeatureSource(typename);
    class Watcher implements FeatureListener {
      public int count = 0;

      public void changed(FeatureEvent featureEvent) {
        System.out.println("Event " + featureEvent);
        count++;
      }
    }
    Watcher watcher = new Watcher();
    fs.addFeatureListener(watcher);

    Id startingFeatures = createFidFilter(fs);
    FilterFactory2 filterFac = CommonFactoryFinder.getFilterFactory2(GeoTools.getDefaultHints());
    try {
      GeometryFactory gf = new GeometryFactory();
      MultiPolygon mp =
          gf.createMultiPolygon(
              new Polygon[] {
                gf.createPolygon(
                    gf.createLinearRing(
                        new Coordinate[] {
                          new Coordinate(-88.071564, 37.51099),
                          new Coordinate(-88.467644, 37.400757),
                          new Coordinate(-90.638329, 42.509361),
                          new Coordinate(-89.834618, 42.50346),
                          new Coordinate(-88.071564, 37.51099)
                        }),
                    new LinearRing[] {})
              });
      mp.setUserData("http://www.opengis.net/gml/srs/epsg.xml#" + EPSG_CODE);

      PropertyName geometryAttributeExpression =
          filterFac.property(ft.getGeometryDescriptor().getLocalName());
      PropertyIsNull geomNullCheck = filterFac.isNull(geometryAttributeExpression);
      Query query = new Query(typename, filterFac.not(geomNullCheck), 1, Query.ALL_NAMES, null);
      SimpleFeatureIterator inStore = fs.getFeatures(query).features();

      SimpleFeature f, f2;
      try {
        SimpleFeature feature = inStore.next();

        SimpleFeature copy = SimpleFeatureBuilder.deep(feature);
        SimpleFeature copy2 = SimpleFeatureBuilder.deep(feature);

        f = SimpleFeatureBuilder.build(ft, copy.getAttributes(), null);
        f2 = SimpleFeatureBuilder.build(ft, copy2.getAttributes(), null);
        assertFalse("Max Feature failed", inStore.hasNext());
      } finally {
        inStore.close();
      }

      org.geotools.util.logging.Logging.getLogger("org.geotools.data.wfs").setLevel(Level.FINE);
      SimpleFeatureCollection inserts = DataUtilities.collection(new SimpleFeature[] {f, f2});
      Id fp = WFSDataStoreWriteOnlineTest.doInsert(post, ft, inserts);

      // okay, now count ...
      FeatureReader<SimpleFeatureType, SimpleFeature> count =
          post.getFeatureReader(new Query(ft.getTypeName()), Transaction.AUTO_COMMIT);
      int i = 0;
      while (count.hasNext() && i < 3) {
        f = count.next();
        i++;
      }
      count.close();

      WFSDataStoreWriteOnlineTest.doDelete(post, ft, fp);
      WFSDataStoreWriteOnlineTest.doUpdate(post, ft, ATTRIBUTE_TO_EDIT, NEW_EDIT_VALUE);
      // assertFalse("events not fired", watcher.count == 0);
    } finally {
      try {
        ((SimpleFeatureStore) fs).removeFeatures(filterFac.not(startingFeatures));
      } catch (Exception e) {
        System.out.println(e);
      }
    }
  }
 private static GeometryDescriptor getGeometryAttDescriptor(SimpleFeatureType schema) {
   return schema.getGeometryDescriptor();
 }
  /** Executes the export command using the provided options. */
  @Override
  protected final void runInternal(GeogigCLI cli) throws IOException {
    if (args.size() < 2) {
      printUsage(cli);
      throw new CommandFailedException();
    }

    String path = args.get(0);
    String tableName = args.get(1);

    checkParameter(tableName != null && !tableName.isEmpty(), "No table name specified");

    DataStore dataStore = getDataStore();

    ObjectId featureTypeId = null;
    if (!Arrays.asList(dataStore.getTypeNames()).contains(tableName)) {
      SimpleFeatureType outputFeatureType;
      if (sFeatureTypeId != null) {
        // Check the feature type id string is a correct id
        Optional<ObjectId> id =
            cli.getGeogig().command(RevParse.class).setRefSpec(sFeatureTypeId).call();
        checkParameter(id.isPresent(), "Invalid feature type reference", sFeatureTypeId);
        TYPE type = cli.getGeogig().command(ResolveObjectType.class).setObjectId(id.get()).call();
        checkParameter(
            type.equals(TYPE.FEATURETYPE),
            "Provided reference does not resolve to a feature type: ",
            sFeatureTypeId);
        outputFeatureType =
            (SimpleFeatureType)
                cli.getGeogig()
                    .command(RevObjectParse.class)
                    .setObjectId(id.get())
                    .call(RevFeatureType.class)
                    .get()
                    .type();
        featureTypeId = id.get();
      } else {
        try {
          SimpleFeatureType sft = getFeatureType(path, cli);
          outputFeatureType =
              new SimpleFeatureTypeImpl(
                  new NameImpl(tableName),
                  sft.getAttributeDescriptors(),
                  sft.getGeometryDescriptor(),
                  sft.isAbstract(),
                  sft.getRestrictions(),
                  sft.getSuper(),
                  sft.getDescription());
        } catch (GeoToolsOpException e) {
          throw new CommandFailedException("No features to export.", e);
        }
      }
      try {
        dataStore.createSchema(outputFeatureType);
      } catch (IOException e) {
        throw new CommandFailedException("Cannot create new table in database", e);
      }
    } else {
      if (!overwrite) {
        throw new CommandFailedException("The selected table already exists. Use -o to overwrite");
      }
    }

    SimpleFeatureSource featureSource = dataStore.getFeatureSource(tableName);
    if (!(featureSource instanceof SimpleFeatureStore)) {
      throw new CommandFailedException("Can't write to the selected table");
    }
    SimpleFeatureStore featureStore = (SimpleFeatureStore) featureSource;
    if (overwrite) {
      try {
        featureStore.removeFeatures(Filter.INCLUDE);
      } catch (IOException e) {
        throw new CommandFailedException("Error truncating table: " + e.getMessage(), e);
      }
    }
    ExportOp op =
        cli.getGeogig()
            .command(ExportOp.class)
            .setFeatureStore(featureStore)
            .setPath(path)
            .setFilterFeatureTypeId(featureTypeId)
            .setAlter(alter);
    if (defaultType) {
      op.exportDefaultFeatureType();
    }
    try {
      op.setProgressListener(cli.getProgressListener()).call();
    } catch (IllegalArgumentException iae) {
      throw new org.locationtech.geogig.cli.InvalidParameterException(iae.getMessage(), iae);
    } catch (GeoToolsOpException e) {
      switch (e.statusCode) {
        case MIXED_FEATURE_TYPES:
          throw new CommandFailedException(
              "The selected tree contains mixed feature types. Use --defaulttype or --featuretype <feature_type_ref> to export.",
              e);
        default:
          throw new CommandFailedException("Could not export. Error:" + e.statusCode.name(), e);
      }
    }

    cli.getConsole().println(path + " exported successfully to " + tableName);
  }
  /**
   * Applies a set of heuristics to find which target attribute corresponds to a certain input
   * attribute.
   *
   * @param sourceType the feature type whose attributes are to be mapped
   * @param targetType the feature type providing the candidate target attributes
   * @return a map from source attribute names to the matching target attribute names
   */
  Map<String, String> buildAttributeMapping(
      SimpleFeatureType sourceType, SimpleFeatureType targetType) {
    // look for the typical manglings. For example, if the target is a
    // shapefile store it will move the geometry and name it the_geom

    // collect the source names
    Set<String> sourceNames = new HashSet<String>();
    for (AttributeDescriptor sd : sourceType.getAttributeDescriptors()) {
      sourceNames.add(sd.getLocalName());
    }

    // first check if we have been kissed by sheer luck and the names are
    // the same
    Map<String, String> result = new HashMap<String, String>();
    for (String name : sourceNames) {
      if (targetType.getDescriptor(name) != null) {
        result.put(name, name);
      }
    }
    sourceNames.removeAll(result.keySet());

    // then check for simple case difference (Oracle case)
    for (String name : sourceNames) {
      for (AttributeDescriptor td : targetType.getAttributeDescriptors()) {
        if (td.getLocalName().equalsIgnoreCase(name)) {
          result.put(name, td.getLocalName());
          break;
        }
      }
    }
    sourceNames.removeAll(result.keySet());

    // then check attribute names being cut (another Oracle case)
    for (String name : sourceNames) {
      String loName = name.toLowerCase();
      for (AttributeDescriptor td : targetType.getAttributeDescriptors()) {
        String tdName = td.getLocalName().toLowerCase();
        if (loName.startsWith(tdName)) {
          result.put(name, td.getLocalName());
          break;
        }
      }
    }
    sourceNames.removeAll(result.keySet());

    // consider the shapefile geometry descriptor mangling
    if (targetType.getGeometryDescriptor() != null
        && "the_geom".equals(targetType.getGeometryDescriptor().getLocalName())
        && !"the_geom".equalsIgnoreCase(sourceType.getGeometryDescriptor().getLocalName())) {
      result.put(sourceType.getGeometryDescriptor().getLocalName(), "the_geom");
    }

    // and finally we return with as much as we can match
    if (!sourceNames.isEmpty()) {
      LOGGER.warning(
          "Could not match the following attributes "
              + sourceNames
              + " to the target feature type ones: "
              + targetType);
    }
    return result;
  }
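
A hedged sketch of how the heuristics behave; both schemas are made up, with a shapefile-style target so the "the_geom" mangling branch applies.

  SimpleFeatureType sourceType =
      DataUtilities.createType("parcels", "geom:Polygon,OWNER_NAME:String");
  SimpleFeatureType targetType =
      DataUtilities.createType("parcels", "the_geom:Polygon,owner_name:String");
  Map<String, String> mapping = buildAttributeMapping(sourceType, targetType);
  // expected result: {"geom" -> "the_geom", "OWNER_NAME" -> "owner_name"}
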
  /**
   * Creates a query to be executed over an in-process view (a view defined by a SQL SELECT
   * statement in the datastore configuration).
   *
   * @return the newly created ArcSDEQuery.
   * @throws IOException see <i>throws DataSourceException</i> below.
   * @see ArcSDEDataStore#registerView(String, PlainSelect)
   */
  public static ArcSDEQuery createInprocessViewQuery(
      final ISession session,
      final SimpleFeatureType fullSchema,
      final Query query,
      final SeQueryInfo definitionQuery,
      final PlainSelect viewSelectStatement)
      throws IOException {

    final Filter filter = query.getFilter();
    final FIDReader fidReader = FIDReader.NULL_READER;

    // the first table has to be the main layer
    final SeSqlConstruct construct;
    try {
      construct = definitionQuery.getConstruct();
    } catch (SeException e) {
      throw new ArcSdeException("shouldn't happen: " + e.getMessage(), e);
    }
    final String[] tables = construct.getTables();
    String layerName = tables[0];
    // @REVISIT: HACK HERE! Look at how to get rid of the alias in the query info, or better,
    // stop using the SeQueryInfo as the definition query, use the PlainSelect instead, and
    // construct the query info dynamically when needed.
    if (layerName.indexOf(" AS") > 0) {
      layerName = layerName.substring(0, layerName.indexOf(" AS"));
    }
    final SeTable sdeTable = session.getTable(layerName);
    final SeLayer sdeLayer;
    if (fullSchema.getGeometryDescriptor() == null) {
      sdeLayer = null;
    } else {
      sdeLayer = session.getLayer(layerName);
    }

    // create the set of filters to work over
    final ArcSDEQuery.FilterSet filters =
        new ArcSDEQuery.FilterSet(
            sdeTable,
            sdeLayer,
            filter,
            fullSchema,
            definitionQuery,
            viewSelectStatement,
            fidReader);

    final Filter unsupportedFilter = filters.getUnsupportedFilter();
    final String[] queryProperties = query.getPropertyNames();
    final SimpleFeatureType querySchema =
        getQuerySchema(queryProperties, unsupportedFilter, fullSchema);

    final ArcSDEQuery sdeQuery;
    sdeQuery =
        new ArcSDEQuery(
            session,
            querySchema,
            filters,
            null,
            fidReader,
            ArcSdeVersionHandler.NONVERSIONED_HANDLER);
    return sdeQuery;
  }
  /*
   * Old method: an attempt to import the file the same way a shapefile is imported. Parts of it
   * may still be useful <- turned out to be right after all..
   */
  public void readFile(String tableName, File file) throws IOException {

    final InputStream inputstream = new FileInputStream(file);
    GeometryJSON gjson = new GeometryJSON(15);
    FeatureJSON fjson = new FeatureJSON(gjson);

    // System.out.println(fjson.readCRS(inputstream));

    FeatureCollection<SimpleFeatureType, SimpleFeature> jfc =
        fjson.readFeatureCollection(inputstream);

    final SimpleFeatureType jsonSchema = jfc.getSchema();

    final SimpleFeatureTypeBuilder dbSftBuilder = new SimpleFeatureTypeBuilder();
    final List<AttributeDescriptor> ads = jsonSchema.getAttributeDescriptors();
    for (final AttributeDescriptor ad : ads) {

      final String n = ad.getName().toString();

      final Name name = new NameImpl(n.toUpperCase());
      final AttributeDescriptor t =
          new AttributeDescriptorImpl(
              ad.getType(),
              name,
              ad.getMinOccurs(),
              ad.getMaxOccurs(),
              ad.isNillable(),
              ad.getDefaultValue());
      dbSftBuilder.add(t);
    }
    // TODO: EPSG!!
    this.epsg = "";

    this.geomType = jsonSchema.getGeometryDescriptor().getType().getBinding().getName();

    dbSftBuilder.setName(new NameImpl(tableName));
    final SimpleFeatureType dbSchema = dbSftBuilder.buildFeatureType();
    List<SimpleFeature> sfList = new ArrayList<SimpleFeature>();
    // Getting geometries and attributes right
    try (FeatureIterator<SimpleFeature> jsonFeatures = jfc.features()) {

      String firstGeometryClass = null;
      while (jsonFeatures.hasNext()) {
        final SimpleFeature sf = jsonFeatures.next();
        String geometryClass = sf.getDefaultGeometry().getClass().getName();

        // if the JSON mixes different geometry types, keep only the first one
        // encountered and skip all others.
        if (firstGeometryClass != null && !geometryClass.equals(firstGeometryClass)) {
          System.out.println(geometryClass);
          continue;
        }

        SimpleFeatureBuilder dbSfBuilder = new SimpleFeatureBuilder(dbSchema);

        for (final AttributeDescriptor ad : jsonSchema.getAttributeDescriptors()) {

          final String attr = ad.getLocalName();
          final String name = attr.toUpperCase();
          Object obj = sf.getAttribute(attr);
          dbSfBuilder.set(name, obj);
        }
        final SimpleFeature of = dbSfBuilder.buildFeature(null);
        sfList.add(of);
        firstGeometryClass = geometryClass;
      }
    }

    FeatureCollection<SimpleFeatureType, SimpleFeature> collection =
        new ListFeatureCollection(dbSchema, sfList);
    dbCollection = collection;
  }
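
A hedged usage sketch; the table name, file path and "importer" instance are assumptions.

  // Sketch only: readFile builds the upper-cased schema and fills the feature collection field.
  importer.readFile("MY_TABLE", new File("/tmp/features.geojson"));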