Code Example #1
  public InterestingPointVisitor(FeatureSource surface, double radius, double hillHeight) {
    surfaceFeatureSource = surface;
    this.radius = radius;
    this.hillHeight = hillHeight;

    viewAngles = DefaultFeatureCollections.newCollection();

    try {
      // "ViewAngles" schema: a Polygon geometry plus a Double attribute named "daukstis".
      viewAngleType = DataUtilities.createType("ViewAngles", "geometry:Polygon,daukstis:Double");
    } catch (SchemaException e) {
      // The type spec is a constant, so this should never fail; fail fast rather
      // than continue with a null viewAngleType.
      throw new IllegalStateException(e);
    }
  }
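For reference, DataUtilities.createType parses a comma-separated list of name:Binding pairs into a SimpleFeatureType. A minimal, self-contained sketch of that call, assuming the same GeoTools-era packages as the example above (the demo class name is illustrative):

import org.geotools.data.DataUtilities;
import org.geotools.feature.SchemaException;
import org.opengis.feature.simple.SimpleFeatureType;

public class CreateTypeDemo {
  public static void main(String[] args) throws SchemaException {
    // Same spec syntax as the constructor above: "attributeName:BindingType" pairs.
    SimpleFeatureType type =
        DataUtilities.createType("ViewAngles", "geometry:Polygon,daukstis:Double");
    System.out.println(type.getTypeName()); // ViewAngles
    System.out.println(type.getAttributeCount()); // 2
  }
}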
Code Example #2
  /**
   * Returns a {@link SimpleFeatureType} which is a "view" of the <code>fullSchema</code> adapted as
   * per the required query property names.
   *
   * @param queryProperties the list of property names required by the output schema.
   * @param unsupportedFilter the part of the query filter that cannot be evaluated natively; any
   *     properties it references are also fetched so the filter can be evaluated at runtime.
   * @param fullSchema a feature type representing an ArcSDE layer full schema.
   * @return a FeatureType derived from <code>fullSchema</code> which contains the property names
   *     required by the <code>query</code> and the ones referenced in the query filter.
   * @throws DataSourceException if the requested attributes do not match the table schema.
   */
  public static SimpleFeatureType getQuerySchema(
      final String[] queryProperties, Filter unsupportedFilter, final SimpleFeatureType fullSchema)
      throws DataSourceException {
    // guess which properties actually need to be retrieved.
    final List<String> queryColumns =
        getQueryColumns(queryProperties, unsupportedFilter, fullSchema);
    final String[] attNames = queryColumns.toArray(new String[queryColumns.size()]);

    try {
      // create the resulting feature type containing only the attributes to retrieve
      return DataUtilities.createSubType(fullSchema, attNames);
    } catch (SchemaException ex) {
      throw new DataSourceException(
          "Some requested attributes do not match the table schema: " + ex.getMessage(), ex);
    }
  }
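A hedged usage sketch: assuming fullSchema is an existing layer schema with a geometry column "geom" and a numeric "population" attribute (both names illustrative), a caller inside the same class might combine a property list with an unsupported filter like this:

// Assumed imports: org.geotools.factory.CommonFactoryFinder,
// org.opengis.filter.Filter, org.opengis.filter.FilterFactory2.
FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();

// Request only "geom" but filter on "population": getQuerySchema should include
// both, so the unsupported filter can still be evaluated at runtime.
Filter unsupported = ff.greater(ff.property("population"), ff.literal(10000));
SimpleFeatureType querySchema =
    getQuerySchema(new String[] {"geom"}, unsupported, fullSchema);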
Code Example #3
  // Jody - Recommend moving to the following
  // when we are ready for CoordinateSystem support
  public FeatureReader<SimpleFeatureType, SimpleFeature> getFeatureReader(
      Query query, Transaction transaction) throws IOException {
    Filter filter = query.getFilter();
    String typeName = query.getTypeName();
    String[] propertyNames = query.getPropertyNames();

    if (filter == null) {
      throw new NullPointerException(
          "getFeatureReader requires Filter: " + "did you mean Filter.INCLUDE?");
    }
    if (typeName == null) {
      throw new NullPointerException(
          "getFeatureReader requires typeName: "
              + "use getTypeNames() for a list of available types");
    }
    if (transaction == null) {
      throw new NullPointerException(
          "getFeatureReader requires Transaction: "
              + "did you mean to use Transaction.AUTO_COMMIT?");
    }
    SimpleFeatureType featureType = getSchema(query.getTypeName());

    if (propertyNames != null || query.getCoordinateSystem() != null) {
      try {
        featureType =
            DataUtilities.createSubType(featureType, propertyNames, query.getCoordinateSystem());
      } catch (SchemaException e) {
        LOGGER.log(Level.FINEST, e.getMessage(), e);
        throw new DataSourceException("Could not create Feature Type for query", e);
      }
    }
    if (Filter.EXCLUDE.equals(filter)) {
      return new EmptyFeatureReader<SimpleFeatureType, SimpleFeature>(featureType);
    }
    // GR: allow subclasses to implement as much filtering as they can,
    // by returning just their unsupported filter
    filter = getUnsupportedFilter(typeName, filter);
    if (filter == null) {
      throw new NullPointerException(
          "getUnsupportedFilter shouldn't return null. Do you mean Filter.INCLUDE?");
    }

    // There are cases where the readers have to lock. Take shapefile for example:
    // getting a reader causes the file to be locked. However, on a commit,
    // TransactionStateDiff locks before a writer is obtained. In order to prevent
    // deadlocks, either the diff or the reader has to be obtained first,
    // consistently. Because shapefile writes to a buffer first, the actual write
    // lock is not flipped until the transaction has done most of its work. As a
    // result I suggest getting the diff first, then getting the reader.
    // JE
    Diff diff = null;
    if (transaction != Transaction.AUTO_COMMIT) {
      TransactionStateDiff state = state(transaction);
      if (state != null) {
        diff = state.diff(typeName);
      }
    }

    // This calls the subclass's "simple" implementation; all other
    // functionality is layered on top of it as wrapping readers.
    //
    FeatureReader<SimpleFeatureType, SimpleFeature> reader = getFeatureReader(typeName, query);

    if (diff != null) {
      reader =
          new DiffFeatureReader<SimpleFeatureType, SimpleFeature>(reader, diff, query.getFilter());
    }

    if (!filter.equals(Filter.INCLUDE)) {
      reader = new FilteringFeatureReader<SimpleFeatureType, SimpleFeature>(reader, filter);
    }

    if (!featureType.equals(reader.getFeatureType())) {
      LOGGER.fine("Recasting feature type to subtype by using a ReTypeFeatureReader");
      reader = new ReTypeFeatureReader(reader, featureType, false);
    }

    if (query.getMaxFeatures() != Query.DEFAULT_MAX) {
      reader =
          new MaxFeatureReader<SimpleFeatureType, SimpleFeature>(reader, query.getMaxFeatures());
    }

    return reader;
  }
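A typical call against a DataStore built on this method might look as follows; the dataStore variable, the "roads" type name, and the attribute names are illustrative assumptions:

// Assumed imports: org.geotools.data.Query, org.geotools.data.Transaction,
// org.geotools.data.FeatureReader, org.opengis.feature.simple.SimpleFeature,
// org.opengis.feature.simple.SimpleFeatureType, org.opengis.filter.Filter.
Query query = new Query("roads", Filter.INCLUDE, new String[] {"the_geom", "name"});
query.setMaxFeatures(100);

FeatureReader<SimpleFeatureType, SimpleFeature> reader =
    dataStore.getFeatureReader(query, Transaction.AUTO_COMMIT);
try {
  while (reader.hasNext()) {
    SimpleFeature feature = reader.next();
    // Diffs, filtering, retyping and the max-features cap have already been
    // applied by the wrapping readers assembled above.
  }
} finally {
  reader.close();
}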
Code Example #4
  public KPTShapefileWriter(String directory, String fileName) {
    this.directory = directory;
    this.fileName = fileName;
    crs = DefaultGeographicCRS.WGS84;

    try {
      TYPE =
          DataUtilities.createType(
              "Location",
              "" //
                  + "cadastral_number:String," // <- a String attribute
                  + "state:String," // a number attribute
                  + "date_created:String," //
                  + "area:String," //
                  + "area_unit:String," //
                  + "name:String," //
                  + "location_in_bounds:String," //
                  + "address_okato:String," //
                  + "address_kladr:String," //
                  + "address_region:String," //
                  + "address_district_name:String," //
                  + "address_district_type:String," //
                  + "address_city_name:String," //
                  + "address_city_type:String," //
                  + "address_locality_name:String," //
                  + "address_locality_type:String," //
                  + "address_street_name:String," //
                  + "address_street_type:String," //
                  + "address_level_1_type:String," //
                  + "address_level_1_value:String," //
                  + "addess_note:String," //
                  + "category:String," //
                  + "utilization:String," //
                  + "utilization_by_doc:String," //
                  + "cadastral_cost:String," //
                  + "cadastral_unit:String," //
                  + "location:Polygon:srid=4326" // <- the geometry attribute: Point type
              );
    } catch (SchemaException e) {
      // The type spec is constant, so this should never fail; fail fast rather
      // than continue with a null TYPE.
      throw new IllegalStateException(e);
    }

    File newFile =
        new File(directory + "/" + fileName.substring(0, fileName.lastIndexOf(".")) + ".shp");

    ShapefileDataStoreFactory dataStoreFactory = new ShapefileDataStoreFactory();

    Map<String, Serializable> params = new HashMap<>();
    params.put("create spatial index", Boolean.TRUE);

    try {
      params.put("url", newFile.toURI().toURL());
      newDataStore = (ShapefileDataStore) dataStoreFactory.createNewDataStore(params);
      newDataStore.createSchema(TYPE);
    } catch (IOException e) {
      // MalformedURLException is an IOException, so one catch covers both the URL
      // conversion and the store creation. Failing fast here avoids a later
      // NullPointerException on an uninitialized newDataStore.
      throw new IllegalStateException("Could not create shapefile data store", e);
    }
  }
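For completeness, a hedged sketch of appending one feature through the store created above; the polygon variable is an assumed, prebuilt JTS Polygon in EPSG:4326 coordinates. Note that the DBF format truncates attribute names to 10 characters, so the longer names in the schema above will be shortened in the file itself:

// Assumed imports: org.geotools.data.FeatureWriter, org.geotools.data.Transaction,
// org.opengis.feature.simple.SimpleFeature, org.opengis.feature.simple.SimpleFeatureType.
FeatureWriter<SimpleFeatureType, SimpleFeature> writer =
    newDataStore.getFeatureWriterAppend(newDataStore.getTypeNames()[0], Transaction.AUTO_COMMIT);
try {
  SimpleFeature feature = writer.next();
  feature.setAttribute("name", "sample parcel"); // a short, DBF-safe column name
  feature.setDefaultGeometry(polygon); // avoids hard-coding the geometry column name
  writer.write();
} finally {
  writer.close();
}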