@Test
  public void testAttributeReaderIndexed() throws IOException {
    URL u = TestData.url(TestCaseSupport.class, SHPFILE);
    File shpFile = DataUtilities.urlToFile(u);

    // open the test shapefile as an indexed shapefile data store
    ShapefileDataStore indexedstore = new ShapefileDataStore(shpFile.toURI().toURL());

    // grab a feature id from the store
    SimpleFeatureIterator it = indexedstore.getFeatureSource().getFeatures().features();
    FeatureId fid = it.next().getIdentifier();
    it.close();

    // query the datastore
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
    Filter idFilter = ff.id(Collections.singleton(fid));
    final Query query =
        new Query(
            indexedstore.getSchema().getName().getLocalPart(), idFilter, new String[] {testColumn});
    final SimpleFeatureCollection indexedfeatures =
        indexedstore.getFeatureSource().getFeatures(query);

    // compare the results
    SimpleFeatureIterator indexIterator = indexedfeatures.features();
    SimpleFeature indexedFeature = indexIterator.next();
    indexIterator.close();

    // get the value of the duplicate column & compare it against expectation
    assertEquals(expectedValue, indexedFeature.getAttribute(testColumn));

    // cleanup
    indexedstore.dispose();
  }
    private LineSegment extractShorelineInterect(SimpleFeature feature) {

      // no cast here: the instanceof checks below handle the numeric conversion safely
      Object sceObject = feature.getAttribute(Constants.SCE_ATTR);
      Object nsdObject = feature.getAttribute(Constants.NSD_ATTR);

      Geometry geometry = (Geometry) feature.getDefaultGeometry();
      Coordinate[] coordinates = geometry.getCoordinates();
      LineSegment segment = new LineSegment(coordinates[0], coordinates[1]);

      double length = segment.getLength();

      double sce = sceObject instanceof Number ? ((Number) sceObject).doubleValue() : Double.NaN;
      double nsd = nsdObject instanceof Number ? ((Number) nsdObject).doubleValue() : Double.NaN;

      // Double.isNaN replaces the original self-comparison NaN checks
      if (!Double.isNaN(sce) && !Double.isNaN(nsd)) {
        return extractShorelineInterectAndCheckLength(segment, nsd, sce);
      }
      if (Double.isNaN(sce) && Double.isNaN(nsd)) {
        return extractShorelineInterectAndCheckLength(segment, 0, length);
      }
      if (Double.isNaN(sce)) {
        sce = length - nsd;
      } else { // nsd is NaN
        nsd = length - sce;
      }
      return extractShorelineInterectAndCheckLength(segment, nsd, sce);
    }
Example #3
  public void addBuildings(final String buildingShapeFileName) {

    final GeometryFactory geometryFactory = new GeometryFactory();
    final WKTReader wktReader = new WKTReader(geometryFactory);

    final ShapeFileReader shapeFileReader = new ShapeFileReader();
    shapeFileReader.readFileAndInitialize(buildingShapeFileName);
    final Collection<SimpleFeature> features = shapeFileReader.getFeatureSet();

    for (SimpleFeature ft : features) {

      try {
        final Geometry geometry = wktReader.read((ft.getAttribute("the_geom")).toString());
        final String buildingType = ft.getAttribute("ANDAMAL_1T").toString();
        final int buildingSize = Integer.valueOf(ft.getAttribute("AREA").toString());
        final Building building = new Building(geometry, buildingSize);
        building.setBuildingType(buildingType);

        for (Zone zone : this.id2zone.values()) {
          if (zone.getGeometry() != null && zone.getGeometry().intersects(geometry)) {
            zone.addBuilding(building);
            break;
          }
        }

      } catch (ParseException e) {
        throw new RuntimeException(e);
      }
    }
  }
  /**
   * Extracts the coordinates of a FeatureCollection into a HashMap keyed by an integer ID.
   *
   * @param nStaz
   * @param collection the feature collection to read
   * @param idField the name of the attribute used as the map key
   * @throws Exception if the elevation field (fPointZ) cannot be read from a feature
   */
  private LinkedHashMap<Integer, Coordinate> getCoordinate(
      int nStaz, SimpleFeatureCollection collection, String idField) throws Exception {
    LinkedHashMap<Integer, Coordinate> id2CoordinatesMap = new LinkedHashMap<Integer, Coordinate>();
    FeatureIterator<SimpleFeature> iterator = collection.features();
    Coordinate coordinate = null;
    try {
      while (iterator.hasNext()) {
        SimpleFeature feature = iterator.next();
        int name = ((Number) feature.getAttribute(idField)).intValue();
        coordinate = ((Geometry) feature.getDefaultGeometry()).getCentroid().getCoordinate();
        double z = 0;
        if (fPointZ != null) {
          try {
            z = ((Number) feature.getAttribute(fPointZ)).doubleValue();
          } catch (NullPointerException e) {
            pm.errorMessage(msg.message("kriging.noPointZ"));
            throw new Exception(msg.message("kriging.noPointZ"));
          }
        }
        coordinate.z = z;
        id2CoordinatesMap.put(name, coordinate);
      }
    } finally {
      iterator.close();
    }

    return id2CoordinatesMap;
  }
  /**
   * Check that a result set contains a stacked point in the right cell with expected attribute
   * values. Because it's not known in advance what the actual location of a stacked point will be,
   * a nearest-point strategy is used.
   *
   * @param result the point stacker output collection
   * @param testPt the location to search near
   * @param expectedCount expected value of the count attribute
   * @param expectedCountUnique expected value of the unique-count attribute
   * @param expectedProportion expected normalized count, or null to skip the check
   * @param expectedProportionUnique expected normalized unique count, or null to skip the check
   */
  private void checkResultPoint(
      SimpleFeatureCollection result,
      Coordinate testPt,
      int expectedCount,
      int expectedCountUnique,
      Double expectedProportion,
      Double expectedProportionUnique) {

    SimpleFeature f = getResultPoint(result, testPt);
    assertNotNull(f);

    // Find the closest point to the test location, then check that the attributes match
    int count = (Integer) f.getAttribute(PointStackerProcess.ATTR_COUNT);
    int countunique = (Integer) f.getAttribute(PointStackerProcess.ATTR_COUNT_UNIQUE);
    double normCount = Double.NaN;
    double normCountUnique = Double.NaN;
    if (expectedProportion != null) {
      normCount = (Double) f.getAttribute(PointStackerProcess.ATTR_NORM_COUNT);
    }
    if (expectedProportionUnique != null) {
      normCountUnique = (Double) f.getAttribute(PointStackerProcess.ATTR_NORM_COUNT_UNIQUE);
    }

    assertEquals(expectedCount, count);
    assertEquals(expectedCountUnique, countunique);
    if (expectedProportion != null) assertEquals(expectedProportion, normCount, 0.0001);
    if (expectedProportionUnique != null)
      assertEquals(expectedProportionUnique, normCountUnique, 0.0001);
  }
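The getResultPoint helper used above is not shown. Given the nearest-point strategy described in the javadoc, it could look roughly like the following sketch (this is an assumption, not the original implementation):

  private SimpleFeature getResultPoint(SimpleFeatureCollection result, Coordinate testPt) {
    // scan the stacked points and keep the one whose geometry lies closest to testPt
    SimpleFeature nearest = null;
    double nearestDistance = Double.MAX_VALUE;
    SimpleFeatureIterator it = result.features();
    try {
      while (it.hasNext()) {
        SimpleFeature f = it.next();
        Coordinate c = ((Point) f.getDefaultGeometry()).getCoordinate();
        double distance = c.distance(testPt);
        if (distance < nearestDistance) {
          nearestDistance = distance;
          nearest = f;
        }
      }
    } finally {
      it.close();
    }
    return nearest;
  }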
  public void testConnectionFailure() throws Exception {
    // create mock objects that will simulate a connection failure
    GSSClient client = createMock(GSSClient.class);
    expect(client.getCentralRevision((QName) anyObject()))
        .andThrow(new IOException("Host unreachable"));
    replay(client);
    GSSClientFactory factory = createMock(GSSClientFactory.class);
    expect(factory.createClient(new URL("http://localhost:8081/geoserver/ows"), null, null))
        .andReturn(client);
    replay(factory);

    synch.clientFactory = factory;

    // perform synch
    Date start = new Date();
    synch.synchronizeOustandlingLayers();
    Date end = new Date();

    // check we stored the last failure marker
    SimpleFeature f =
        getSingleFeature(fsUnitTables, ff.equal(ff.property("table_id"), ff.literal(1), false));
    Date lastFailure = (Date) f.getAttribute("last_failure");
    assertNotNull(lastFailure);
    assertTrue(lastFailure.compareTo(start) >= 0 && lastFailure.compareTo(end) <= 0);

    // check we marked the unit as failed
    f = getSingleFeature(fsUnits, ff.equal(ff.property("unit_name"), ff.literal("unit1"), false));
    assertTrue((Boolean) f.getAttribute("errors"));
  }
Example #7
  public void testImportIntoDatabaseWithEncoding() throws Exception {
    Catalog cat = getCatalog();

    DataStoreInfo ds = createH2DataStore(cat.getDefaultWorkspace().getName(), "ming");

    File dir = tmpDir();
    unpack("shape/ming_time.zip", dir);

    ImportContext context = importer.createContext(new Directory(dir), ds);
    assertEquals(1, context.getTasks().size());

    context.getTasks().get(0).getData().setCharsetEncoding("UTF-8");
    importer.run(context);

    FeatureTypeInfo info = (FeatureTypeInfo) context.getTasks().get(0).getLayer().getResource();
    FeatureSource<? extends FeatureType, ? extends Feature> fs = info.getFeatureSource(null, null);
    FeatureCollection<? extends FeatureType, ? extends Feature> features = fs.getFeatures();
    FeatureIterator<? extends Feature> it = features.features();
    assertTrue(it.hasNext());
    SimpleFeature next = (SimpleFeature) it.next();
    // let's test some attributes to see if they were digested properly
    String type_ch = (String) next.getAttribute("type_ch");
    assertEquals("卫", type_ch);
    String name_ch = (String) next.getAttribute("name_ch");
    assertEquals("杭州前卫", name_ch);

    it.close();
  }
  @Test
  public void testGetFeaturesFeatureSource() throws Exception {
    // check the schemas in feature source and feature collection
    SimpleFeatureSource fs = rts.getFeatureSource(RENAMED);
    assertEquals(primitive, fs.getSchema());
    SimpleFeatureCollection fc = fs.getFeatures();
    assertEquals(primitive, fc.getSchema());
    assertTrue(fc.size() > 0);

    // make sure the feature schema is good as well
    FeatureIterator<SimpleFeature> it = fc.features();
    SimpleFeature sf = it.next();
    it.close();

    assertEquals(primitive, sf.getFeatureType());

    // check the feature ids have been renamed as well
    assertTrue(
        "Feature id has not been renamed, it's still " + sf.getID(),
        sf.getID().startsWith(RENAMED));

    // check mappings occurred
    assertEquals("description-f001", sf.getAttribute("description"));
    assertTrue(
        new WKTReader()
            .read("MULTIPOINT(39.73245 2.00342)")
            .equalsExact((Geometry) sf.getAttribute("pointProperty")));
    assertEquals(new Long(155), sf.getAttribute("intProperty"));
    assertNull(sf.getAttribute("newProperty"));
  }
 private void writeImplementation(SimpleFeature f) throws IOException {
   writer.next();
   writer.writeFeatureID(f.getID());
   for (int i = 0; i < f.getAttributeCount(); i++) {
     if (f.getAttribute(i) == null) writer.write(i, "<null>");
     else writer.write(i, f.getAttribute(i));
   }
 }
  private void checkStackedPoint(
      Coordinate expectedCoordinate, int count, int countUnique, SimpleFeature f) {
    if (expectedCoordinate != null) {
      Point p = (Point) f.getDefaultGeometry();
      assertEquals(expectedCoordinate, p.getCoordinate());
    }

    assertEquals(count, f.getAttribute(PointStackerProcess.ATTR_COUNT));
    assertEquals(countUnique, f.getAttribute(PointStackerProcess.ATTR_COUNT_UNIQUE));
  }
    @Override
    public void map(Text key, SimpleFeature value, Context context)
        throws IOException, InterruptedException {
      Counter counter =
          context.getCounter(CountersEnum.class.getName(), CountersEnum.FEATURES.toString());
      counter.increment(1);

      Object[] values = new Object[] {value.getAttribute("dtg"), value.getAttribute("geom")};
      SimpleFeature feature = new ScalaSimpleFeature(value.getID(), sft, values);
      context.write(text, feature);
    }
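The mapper above copies only the dtg and geom attributes into a reduced schema sft defined elsewhere in the job setup. As a hedged sketch, such a schema could be built with GeoTools' DataUtilities (the type name "reduced" and the SRID are assumptions; createType throws a checked SchemaException the setup code would need to handle):

  // hypothetical reduced schema holding just the two attributes the mapper writes
  SimpleFeatureType sft = DataUtilities.createType("reduced", "dtg:Date,*geom:Point:srid=4326");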
  public void testLocalChanges() throws Exception {
    // apply a local change on Central so that we'll get a non empty transaction sent to the client
    VersioningFeatureStore restricted =
        (VersioningFeatureStore) synchStore.getFeatureSource("restricted");
    SimpleFeatureType schema = restricted.getSchema();
    // remove the third feature
    Id removeFilter =
        ff.id(singleton(ff.featureId("restricted.c15e76ab-e44b-423e-8f85-f6d9927b878a")));
    restricted.removeFeatures(removeFilter);
    assertEquals(3, restricted.getCount(Query.ALL));

    // build the expected PostDiff request
    QName typeName = new QName("http://www.openplans.org/spearfish", "restricted");
    PostDiffType postDiff = new PostDiffType();
    postDiff.setFromVersion(-1);
    postDiff.setToVersion(3);
    postDiff.setTypeName(typeName);
    TransactionType changes = WfsFactory.eINSTANCE.createTransactionType();
    DeleteElementType delete = WfsFactory.eINSTANCE.createDeleteElementType();
    delete.setTypeName(typeName);
    delete.setFilter(removeFilter);
    changes.getDelete().add(delete);
    postDiff.setTransaction(changes);

    // create mock objects that will check the calls are flowing as expected
    GSSClient client = createMock(GSSClient.class);
    expect(client.getCentralRevision((QName) anyObject())).andReturn(new Long(-1));
    client.postDiff(postDiff);
    expect(client.getDiff((GetDiffType) anyObject())).andReturn(new GetDiffResponseType());
    replay(client);
    GSSClientFactory factory = createMock(GSSClientFactory.class);
    expect(factory.createClient(new URL("http://localhost:8081/geoserver/ows"), null, null))
        .andReturn(client);
    replay(factory);

    synch.clientFactory = factory;

    // perform synch
    Date start = new Date();
    synch.synchronizeOustandlingLayers();
    Date end = new Date();

    // check we stored the last synch marker
    SimpleFeature f =
        getSingleFeature(fsUnitTables, ff.equal(ff.property("table_id"), ff.literal(1), false));
    Date lastSynch = (Date) f.getAttribute("last_synchronization");
    assertNotNull(lastSynch);
    assertTrue(lastSynch.compareTo(start) >= 0 && lastSynch.compareTo(end) <= 0);
    assertNull(f.getAttribute("last_failure"));

    // check we marked the unit as succeeded
    f = getSingleFeature(fsUnits, ff.equal(ff.property("unit_name"), ff.literal("unit1"), false));
    assertFalse((Boolean) f.getAttribute("errors"));
  }
Example #13
  public void testImportCSVIndirect() throws Exception {
    File dir = unpack("csv/locations.zip");
    String wsName = getCatalog().getDefaultWorkspace().getName();

    DataStoreInfo h2DataStore = createH2DataStore(wsName, "csvindirecttest");
    SpatialFile importData = new SpatialFile(new File(dir, "locations.csv"));

    ImportContext context = importer.createContext(importData, h2DataStore);
    assertEquals(1, context.getTasks().size());
    ImportTask task = context.getTasks().get(0);

    TransformChain transformChain = task.getTransform();
    transformChain.add(new AttributesToPointGeometryTransform("LAT", "LON"));
    assertEquals(ImportTask.State.NO_CRS, task.getState());

    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    resource.setSRS("EPSG:4326");

    assertTrue("Item not ready", importer.prep(task));
    assertEquals(ImportTask.State.READY, task.getState());

    context.updated();
    assertEquals(ImportContext.State.PENDING, context.getState());
    importer.run(context);

    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    SimpleFeatureType featureType = (SimpleFeatureType) fti.getFeatureType();
    GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
    assertNotNull("Expecting geometry", geometryDescriptor);
    assertEquals("Invalid geometry name", "location", geometryDescriptor.getLocalName());
    assertEquals(3, featureType.getAttributeCount());
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
        fti.getFeatureSource(null, null);
    FeatureCollection<? extends FeatureType, ? extends Feature> features =
        featureSource.getFeatures();
    assertEquals(9, features.size());
    FeatureIterator<? extends Feature> featureIterator = features.features();
    assertTrue("Expected features", featureIterator.hasNext());
    SimpleFeature feature = (SimpleFeature) featureIterator.next();
    assertNotNull(feature);
    assertEquals("Invalid city attribute", "Trento", feature.getAttribute("CITY"));
    assertEquals("Invalid number attribute", 140, feature.getAttribute("NUMBER"));
    Object geomAttribute = feature.getAttribute("location");
    assertNotNull("Expected geometry", geomAttribute);
    Point point = (Point) geomAttribute;
    Coordinate coordinate = point.getCoordinate();
    assertEquals("Invalid x coordinate", 11.12, coordinate.x, 0.1);
    assertEquals("Invalid y coordinate", 46.07, coordinate.y, 0.1);
    featureIterator.close();
  }
 private void addAttributeValues(
     SimpleFeature feature, List<String> retained, SimpleFeatureBuilder fb) {
   Iterator<AttributeDescriptor> firstIterator =
       feature.getType().getAttributeDescriptors().iterator();
   while (firstIterator.hasNext()) {
     AttributeDescriptor ad = firstIterator.next();
     Object firstAttribute = feature.getAttribute(ad.getLocalName());
     if ((retained == null || retained.contains(ad.getLocalName()))
         && !(firstAttribute instanceof Geometry)) {
       fb.add(firstAttribute);
     }
   }
 }
Example #15
 public ZonalSystem(final String zonesShapeFileName) {
   final GeometryFactory geometryFactory = new GeometryFactory();
   final WKTReader wktReader = new WKTReader(geometryFactory);
   for (SimpleFeature ft : ShapeFileReader.getAllFeatures(zonesShapeFileName)) {
     try {
       final String zoneId = ft.getAttribute("ZONE").toString();
       final Zone zone = new Zone(zoneId);
       zone.setGeometry(wktReader.read((ft.getAttribute("the_geom")).toString()));
       this.id2zone.put(zoneId, zone);
     } catch (ParseException e) {
       throw new RuntimeException(e);
     }
   }
 }
    public BugInfo(SimpleFeature next, long idx) throws IndexOutOfBoundsException, ParseException {

      bugId = idx;
      attributes = new HashMap<>();
      Collection<Property> properties = next.getProperties();
      Iterator<Property> it = properties.iterator();
      while (it.hasNext()) {
        Property p = it.next();
        Object value = p.getValue();
        // guard against null property values before converting to String
        attributes.put(p.getName().toString(), value == null ? null : value.toString());
      }
      this.geom = (Geometry) next.getAttribute(0);
      this.desc = (String) next.getAttribute("error_desc");
      this.id = next.getID();
      name = next.getName();
    }
Example #17
  public ShapeElemtex(SimpleFeature f, String tipo) {

    super(f, tipo);

    shapeId = "ELEMTEX" + super.newShapeId();

    // Elemtex provides its geometry as a MultiLineString
    if (f.getDefaultGeometry()
        .getClass()
        .getName()
        .equals("com.vividsolutions.jts.geom.MultiLineString")) {

      MultiLineString l = (MultiLineString) f.getDefaultGeometry();
      LineString line = new LineString(l.getCoordinates(), null, 0);

      coor = line.getEnvelopeInternal().centre();
    } else {
      System.out.println(
          "["
              + new Timestamp(new Date().getTime())
              + "] Formato geometrico "
              + f.getDefaultGeometry().getClass().getName()
              + " desconocido dentro del shapefile ELEMTEX");
    }

    // The remaining attributes are metadata, from which we extract
    ttggss = (String) f.getAttribute("TTGGSS");

    //		try {
    //			rotulo = new String(f.getAttribute("ROTULO").toString().getBytes(), "UTF-8");
    //			rotulo = eliminarComillas(rotulo);
    //		} catch (UnsupportedEncodingException e) {e.printStackTrace();}

    rotulo = eliminarComillas(f.getAttribute("ROTULO").toString());

    // Depending on the ttggss value it may or may not be used
    if (ttggss != null) {
      tags.addAll(ttggssParser(ttggss));
    }

    // If we wanted to read every attribute from the .shp
    //		this.atributos = new ArrayList<ShapeAttribute>();
    //		for (int x = 1; x < f.getAttributes().size(); x++){
    //			atributos.add(new ShapeAttribute(f.getFeatureType().getDescriptor(x).getType(),
    // f.getAttributes().get(x)));
    //		}

  }
  private static void convertFeature(
      GeometryStreamConverter converter,
      SimpleFeature feature,
      List<String> keyAttributes,
      String projectionWKT)
      throws Exception {
    int shapeType =
        GeometryStreamUtils.getShapeTypeFromGeometryType(
            feature.getType().getType(0).getName().toString());
    // get shape key by concatenating specified attributes
    String shapeKey = "";
    for (int attrIndex = 0; attrIndex < keyAttributes.size(); attrIndex++) {
      Object attributeObject = feature.getAttribute(keyAttributes.get(attrIndex));
      shapeKey += ShapefileUtilities.forAttribute(attributeObject, String.class);
    }
    IFeatureGeometryStream geomStream =
        new JTSFeatureGeometryStream((Geometry) feature.getDefaultGeometry());
    converter.convertFeature(geomStream, shapeType, shapeKey, projectionWKT);

    /*
    if (debugDBF)
    {
    	// debug: print data
    	String attrs = geometryMetadata.shapeID + ", " + "\"" + shapeKey + '"';
    	for (int i = 1; i < feature.getAttributeCount(); i++)
    		attrs += ", \"" + feature.getAttribute(i) + '"';
    	System.out.println(attrs);
    }
    if (debugCounts)
    {
    	System.out.println(String.format("%s (geom %s) has %s vertices", shapeKey, i, coords.length));
    }
    */
  }
Example #19
  /**
   * Finds the geometric attribute requested by the symbolizer.
   *
   * @param feature The feature to read the geometry from
   * @param symbolizer The symbolizer
   * @return The geometry requested in the symbolizer, or the default geometry if none is specified
   */
  private com.vividsolutions.jts.geom.Geometry findGeometry(
      SimpleFeature feature, Symbolizer symbolizer) {
    String geomName = getGeometryPropertyName(symbolizer);
    // get the geometry
    com.vividsolutions.jts.geom.Geometry geometry;
    if (geomName == null || feature.getType().getDescriptor(geomName) == null) {
      geometry = (Geometry) feature.getDefaultGeometry();
    } else {
      geometry = (com.vividsolutions.jts.geom.Geometry) feature.getAttribute(geomName);
    }
    if (geometry == null) {
      return null; // nothing to see here
    }
    // if the symbolizer is a point or text symbolizer, generate a suitable location to place
    // the point in order to avoid recomputing that location at each rendering step

    if ((symbolizer instanceof PointSymbolizer || symbolizer instanceof TextSymbolizer)
        && !(geometry instanceof Point)) {
      if (geometry instanceof LineString && !(geometry instanceof LinearRing)) {
        // use the midpoint of the first segment as the point/text symbolizer anchor
        Coordinate[] coordinates = geometry.getCoordinates();
        Coordinate start = coordinates[0];
        Coordinate end = coordinates[1];
        Coordinate mid = new Coordinate((start.x + end.x) / 2, (start.y + end.y) / 2);
        geometry = geometry.getFactory().createPoint(mid);
      } else {
        // otherwise use the centroid of the polygon
        geometry = geometry.getCentroid();
      }
    }
    return geometry;
  }
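The getGeometryPropertyName helper referenced above is not included in this snippet; a minimal sketch of what it might do, using the legacy accessor on the GeoTools Symbolizer interface, is:

  private String getGeometryPropertyName(Symbolizer symbolizer) {
    // a null return makes findGeometry fall back to the feature's default geometry
    return symbolizer == null ? null : symbolizer.getGeometryPropertyName();
  }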
Example #20
  /**
   * Puts together a map of VPF files and their corresponding table rows.
   *
   * @param file the primary VPF file
   * @param row the feature row read from that file
   * @return a map from each VPF file involved in the feature class joins to its matching row
   * @throws IOException if a joined row cannot be read
   */
  private Map generateFileRowMap(VPFFile file, SimpleFeature row) throws IOException {
    String tileFileName = null;
    Map rows = new HashMap();
    rows.put(file, row);
    Iterator joinIter = featureType.getFeatureClass().getJoinList().iterator();
    while (joinIter.hasNext()) {
      ColumnPair columnPair = (ColumnPair) joinIter.next();
      VPFFile primaryFile = getVPFFile(columnPair.column1);
      VPFFile joinFile = getVPFFile(columnPair.column2);

      if (!rows.containsKey(joinFile) && rows.containsKey(primaryFile)) {
        SimpleFeature joinRow = (SimpleFeature) rows.get(primaryFile);

        try {
          int joinID =
              Integer.parseInt(joinRow.getAttribute(columnPair.column1.getName()).toString());
          rows.put(
              joinFile,
              getVPFFile(columnPair.column2).getRowFromId(columnPair.column2.getName(), joinID));
        } catch (NullPointerException exc) {
          // Non-matching joins - just put in a NULL
          rows.put(joinFile, null);
        } catch (IllegalAttributeException exc) {
          // I really don't expect to see this one
          exc.printStackTrace();
          rows.put(joinFile, null);
        }
      }
    }
    return rows;
  }
  /**
   * Sets the features of the source.
   *
   * <p>This method operates by first clearing the contents of the feature store ({@link
   * #removeFeatures(Filter)}), and then obtaining an appending feature writer and writing all
   * features from <tt>reader</tt> to it.
   */
  public final void setFeatures(FeatureReader<SimpleFeatureType, SimpleFeature> reader)
      throws IOException {
    // remove features
    removeFeatures(Filter.INCLUDE);

    // grab a feature writer for insert
    FeatureWriter<SimpleFeatureType, SimpleFeature> writer = getWriter(Filter.INCLUDE, WRITER_ADD);
    try {
      while (reader.hasNext()) {
        SimpleFeature feature = reader.next();

        // grab next feature and populate it
        // JD: worth a note on how we do this... we take a "pull" approach
        // because the raw schema we are inserting into may not match the
        // schema of the features we are inserting
        SimpleFeature toWrite = writer.next();
        for (int i = 0; i < toWrite.getType().getAttributeCount(); i++) {
          String name = toWrite.getType().getDescriptor(i).getLocalName();
          toWrite.setAttribute(name, feature.getAttribute(name));
        }

        // perform the write
        writer.write();
      }
    } finally {
      writer.close();
    }
  }
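A hedged usage sketch of the method above: replacing the contents of one feature type with features read from another store. The sourceDataStore, targetDataStore, and "roads" type name are assumptions, and the cast assumes the target type is writable:

  FeatureReader<SimpleFeatureType, SimpleFeature> reader =
      sourceDataStore.getFeatureReader(new Query("roads"), Transaction.AUTO_COMMIT);
  try {
    SimpleFeatureStore target = (SimpleFeatureStore) targetDataStore.getFeatureSource("roads");
    // clears the target type, then appends everything from the reader
    target.setFeatures(reader);
  } finally {
    reader.close();
  }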
 private static Object[] copyAttributes(
     SimpleFeatureType destSchema,
     SimpleFeature source,
     Map<String, Iterator<? extends Geometry>> geometries,
     Map<String, String> attributeMap,
     MathTransform mt) {
   Object[] attributes = new Object[destSchema.getAttributeCount()];
   for (int i = 0; i < attributes.length; i++) {
     String sourceAttributeName = destSchema.getDescriptor(i).getName().getLocalPart();
     String name = attributeMap.get(sourceAttributeName);
     if (name != null) attributes[i] = source.getAttribute(name);
     else {
       attributes[i] = destSchema.getDescriptor(i).getDefaultValue();
     }
     if (attributes[i] instanceof Geometry) {
       Class<? extends Geometry> geomType =
           (Class<? extends Geometry>) destSchema.getDescriptor(i).getType().getBinding();
       if (!geomType.isAssignableFrom(attributes[i].getClass())) {
         Collection<? extends Geometry> geom =
             createCompatibleGeometry((Geometry) attributes[i], geomType);
         Iterator<? extends Geometry> giter = geom.iterator();
         attributes[i] = giter.next();
         if (giter.hasNext()) geometries.put(sourceAttributeName, giter);
       }
       attributes[i] = transformGeom((Geometry) attributes[i], mt);
     }
   }
   return attributes;
 }
  FeatureId addFeature(
      SimpleFeature feature, FeatureWriter<SimpleFeatureType, SimpleFeature> writer)
      throws IOException {
    // grab next feature and populate it
    // JD: worth a note on how we do this... we take a "pull" approach
    // because the raw schema we are inserting into may not match the
    // schema of the features we are inserting
    SimpleFeature toWrite = writer.next();
    for (int i = 0; i < toWrite.getType().getAttributeCount(); i++) {
      String name = toWrite.getType().getDescriptor(i).getLocalName();
      toWrite.setAttribute(name, feature.getAttribute(name));
    }

    // copy over the user data
    if (feature.getUserData().size() > 0) {
      toWrite.getUserData().putAll(feature.getUserData());
    }

    // pass through the fid if the user asked so
    boolean useExisting = Boolean.TRUE.equals(feature.getUserData().get(Hints.USE_PROVIDED_FID));
    if (getQueryCapabilities().isUseProvidedFIDSupported() && useExisting) {
      ((FeatureIdImpl) toWrite.getIdentifier()).setID(feature.getID());
    }

    // perform the write
    writer.write();

    // copy any metadata from the feature that was actually written
    feature.getUserData().putAll(toWrite.getUserData());

    // add the id to the set of inserted
    FeatureId id = toWrite.getIdentifier();
    return id;
  }
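The Hints.USE_PROVIDED_FID branch above is driven by user data on the incoming feature. A caller could request a client-assigned feature id roughly as follows (a sketch: the schema, values array, and "roads.42" id are hypothetical, and the store must report isUseProvidedFIDSupported()):

  // ask the store to keep the client-assigned id instead of generating one
  SimpleFeature feature = SimpleFeatureBuilder.build(schema, values, "roads.42");
  feature.getUserData().put(Hints.USE_PROVIDED_FID, Boolean.TRUE);
  featureStore.addFeatures(DataUtilities.collection(feature));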
  /**
   * Splits a '|'-separated attribute value into at most two integer values.
   *
   * @param inputFeature the feature to read the attribute from
   * @param attributeName the name of the attribute to split
   * @return a two-element int array (unparsable entries default to 0), or an empty array when the
   *     attribute is null
   */
  private int[] extractMultipleValues(SimpleFeature inputFeature, String attributeName) {
    if (inputFeature.getAttribute(attributeName) == null) {
      return new int[0];
    }
    String[] svalues = inputFeature.getAttribute(attributeName).toString().split("\\|");
    int[] values = new int[] {0, 0};

    // keep at most the first two values so extra entries cannot overflow the array
    for (int count = 0; count < svalues.length && count < values.length; count++) {
      try {
        values[count] = Integer.parseInt(svalues[count]);
      } catch (NumberFormatException e) {
        // unparsable entries keep their default value of 0
      }
    }
    return values;
  }
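For illustration, with the bounds-checked loop a hypothetical attribute value "12|34" yields {12, 34}, a single value "7" yields {7, 0}, and a missing attribute yields an empty array:

  // "floor_range" is an assumed attribute name used only for this example
  int[] range = extractMultipleValues(inputFeature, "floor_range");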
Example #25
  public Map<String, Zone> getZonesInsideBoundary(final String zonesBoundaryShapeFileName) {

    final Collection<SimpleFeature> features =
        ShapeFileReader.getAllFeatures(zonesBoundaryShapeFileName);
    if (features.size() != 1) {
      throw new RuntimeException("not exactly one feature in shape file");
    }

    final SimpleFeature feature = features.iterator().next();
    final WKTReader wktreader = new WKTReader();
    final Geometry limitingPolygon;
    try {
      limitingPolygon = wktreader.read(feature.getAttribute("the_geom").toString());
    } catch (ParseException e) {
      throw new RuntimeException(e);
    }

    final Map<String, Zone> result = new LinkedHashMap<String, Zone>();
    for (Map.Entry<String, Zone> id2zoneEntry : this.id2zone.entrySet()) {
      if (limitingPolygon.covers(id2zoneEntry.getValue().getGeometry())) {
        result.put(id2zoneEntry.getKey(), id2zoneEntry.getValue());
      }
    }
    return result;
  }
  /** @see org.geotools.data.FeatureStore#setFeatures(org.geotools.data.FeatureReader) */
  public void setFeatures(FeatureReader<SimpleFeatureType, SimpleFeature> reader)
      throws IOException {
    WFSTransactionState ts = null;

    if (trans == Transaction.AUTO_COMMIT) {
      ts = new WFSTransactionState(ds);
    } else {
      ts = (WFSTransactionState) trans.getState(ds);
    }

    ts.addAction(
        getSchema().getTypeName(), new DeleteAction(getSchema().getTypeName(), Filter.INCLUDE));

    ReferencedEnvelope bounds = null;
    while (reader.hasNext()) {

      try {
        SimpleFeature f = reader.next();
        List<AttributeDescriptor> atrs = f.getFeatureType().getAttributeDescriptors();
        for (int i = 0; i < atrs.size(); i++) {
          if (atrs.get(i) instanceof GeometryDescriptor) {
            Geometry g = (Geometry) f.getAttribute(i);
            // skip null geometries before touching them
            if (g == null) continue;
            CoordinateReferenceSystem cs =
                ((GeometryDescriptor) atrs.get(i)).getCoordinateReferenceSystem();
            if (cs != null && !cs.getIdentifiers().isEmpty())
              g.setUserData(cs.getIdentifiers().iterator().next().toString());
            if (bounds == null) {
              bounds = new ReferencedEnvelope(g.getEnvelopeInternal(), cs);
            } else {
              bounds.expandToInclude(g.getEnvelopeInternal());
            }
          }
        }
        ts.addAction(getSchema().getTypeName(), new InsertAction(f));
      } catch (NoSuchElementException e) {
        WFS_1_0_0_DataStore.LOGGER.warning(e.toString());
      } catch (IllegalAttributeException e) {
        WFS_1_0_0_DataStore.LOGGER.warning(e.toString());
      }
    }

    // Fire a notification.
    // JE
    if (bounds == null) {
      // if bounds are null then send an envelope to say that features were added but
      // at an unknown location.
      bounds = new ReferencedEnvelope(getSchema().getCoordinateReferenceSystem());
      ((WFS_1_0_0_DataStore) getDataStore())
          .listenerManager.fireFeaturesRemoved(
              getSchema().getTypeName(), getTransaction(), bounds, false);
    } else {
      ((WFS_1_0_0_DataStore) getDataStore())
          .listenerManager.fireFeaturesRemoved(
              getSchema().getTypeName(), getTransaction(), bounds, false);
    }
    if (trans == Transaction.AUTO_COMMIT) {
      ts.commit();
    }
  }
  @Test
  public void testModifyIssue() throws Exception {
    String newDescription = "new modified description"; // $NON-NLS-1$

    list.clear();
    list.add(IssuesListTestHelper.createFeatureIssue("test")); // $NON-NLS-1$

    list.refresh();
    assertEquals(1, list.size());

    IIssue issue = list.get(0);
    issue.setDescription(newDescription);
    ((IRemoteIssuesList) list).save(issue);
    FilterFactory factory = CommonFactoryFinder.getFilterFactory(GeoTools.getDefaultHints());

    Expression expr2 = factory.literal(issue.getId());
    Expression expr1 = factory.property(IssuesListTestHelper.ISSUE_ID_ATTR);
    Filter filter = factory.equals(expr1, expr2);

    SimpleFeature next =
        store
            .getFeatureSource(featureType.getName().getLocalPart())
            .getFeatures(filter)
            .features()
            .next();
    assertEquals(newDescription, next.getAttribute(IssuesListTestHelper.DESCRIPTION_ATTR));
  }
Example #28
 private Filter createFilter(SimpleFeature feature, FeatureTypeRelation rel) {
   FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
   List<Filter> filters = new ArrayList<Filter>();
   for (FeatureTypeRelationKey key : rel.getRelationKeys()) {
     AttributeDescriptor rightSide = key.getRightSide();
     AttributeDescriptor leftSide = key.getLeftSide();
     Object value = feature.getAttribute(leftSide.getName());
     if (value == null) {
       continue;
     }
     if (AttributeDescriptor.GEOMETRY_TYPES.contains(rightSide.getType())
         && AttributeDescriptor.GEOMETRY_TYPES.contains(leftSide.getType())) {
       filters.add(ff.not(ff.isNull(ff.property(rightSide.getName()))));
       filters.add(ff.intersects(ff.property(rightSide.getName()), ff.literal(value)));
     } else {
       filters.add(ff.equals(ff.property(rightSide.getName()), ff.literal(value)));
     }
   }
   if (filters.size() > 1) {
     return ff.and(filters);
   } else if (filters.size() == 1) {
     return filters.get(0);
   } else {
     return null;
   }
 }
    @Override
    public FileGroup next() {
      SimpleFeature next = null;

      // look for cached feature
      if (cachedNext != null) {
        next = cachedNext;
        cachedNext = null;
      } else {
        next = featureIterator.next();
      }

      // Avoid adding the feature to a collection to reduce memory consumption
      // we only take note of the firstFeature
      int groupedFeatures = 0;
      SimpleFeature firstFeature = null;

      // resolve the location
      String granuleLocation = (String) next.getAttribute(locationAttributeName);
      URL resolved = pathType.resolvePath(parentLocation, granuleLocation);
      File file = null;
      if (resolved != null) {
        file = DataUtilities.urlToFile(resolved);
        if (file != null && file.exists()) {
          groupedFeatures++;
          firstFeature = next;
        }
      }
      if (groupedFeatures == 0) {
        return null;
      }

      while (featureIterator.hasNext()) {
        // Group features sharing same location
        next = featureIterator.next();
        String nextLocation = (String) next.getAttribute(locationAttributeName);
        if (granuleLocation.equalsIgnoreCase(nextLocation)) {
          groupedFeatures++;
        } else {
          cachedNext = next;
          break;
        }
      }

      // I have to group the features to get the ranges.
      return buildFileGroup(file, groupedFeatures > 1, firstFeature);
    }
  /**
   * Aggregates the input arcs over the grid cells read from the input.
   *
   * @param trace
   * @param dataStore
   * @param outputObjects
   * @param total
   * @param errors
   * @param startErrors
   * @param outputName
   * @param aggregationLevel
   * @return the updated error count
   * @throws IOException
   */
  private int aggregateArcsOnGrid(
      int trace,
      DataStore dataStore,
      OutputObject[] outputObjects,
      int total,
      int errors,
      int startErrors,
      String outputName,
      int aggregationLevel)
      throws IOException {
    try {
      String inputGeometryName = getInputGeometryName(dataStore);

      SimpleFeature gridFeature = null;
      while ((gridFeature = readInput()) != null) {

        int id = nextId();
        int idTematico = ((BigDecimal) gridFeature.getAttribute("gid")).intValue();

        Geometry cell = (Geometry) gridFeature.getDefaultGeometry();

        FeatureSource<SimpleFeatureType, SimpleFeature> reader =
            createInputReader(dataStore, Transaction.AUTO_COMMIT, null);

        FeatureIterator<SimpleFeature> iterator =
            reader
                .getFeatures(
                    filterFactory.intersects(
                        filterFactory.property(inputGeometryName), filterFactory.literal(cell)))
                .features();

        try {
          errors =
              aggregateStep(
                  trace,
                  dataStore,
                  outputObjects,
                  total,
                  errors,
                  startErrors,
                  outputName,
                  id,
                  idTematico,
                  iterator,
                  cell,
                  false,
                  true);
        } finally {
          iterator.close();
        }
      }
      importFinished(total, errors - startErrors, "Data imported in " + outputName);

    } finally {
      closeInputReader();
    }

    return errors;
  }