Example #1
  public void testImportIntoDatabaseWithEncoding() throws Exception {
    Catalog cat = getCatalog();

    DataStoreInfo ds = createH2DataStore(cat.getDefaultWorkspace().getName(), "ming");

    File dir = tmpDir();
    unpack("shape/ming_time.zip", dir);

    ImportContext context = importer.createContext(new Directory(dir), ds);
    assertEquals(1, context.getTasks().size());

    context.getTasks().get(0).getData().setCharsetEncoding("UTF-8");
    importer.run(context);

    FeatureTypeInfo info = (FeatureTypeInfo) context.getTasks().get(0).getLayer().getResource();
    FeatureSource<? extends FeatureType, ? extends Feature> fs = info.getFeatureSource(null, null);
    FeatureCollection<? extends FeatureType, ? extends Feature> features = fs.getFeatures();
    FeatureIterator<? extends Feature> it = features.features();
    assertTrue(it.hasNext());
    SimpleFeature next = (SimpleFeature) it.next();
    // let's test some attributes to see if they were digested properly
    String type_ch = (String) next.getAttribute("type_ch");
    assertEquals("卫", type_ch);
    String name_ch = (String) next.getAttribute("name_ch");
    assertEquals("杭州前卫", name_ch);

    it.close();
  }
Example #2
  public void testImportKMLIndirect() throws Exception {
    File dir = unpack("kml/sample.zip");
    String wsName = getCatalog().getDefaultWorkspace().getName();
    DataStoreInfo h2DataStore = createH2DataStore(wsName, "kmltest");
    SpatialFile importData = new SpatialFile(new File(dir, "sample.kml"));
    ImportContext context = importer.createContext(importData, h2DataStore);
    assertEquals(1, context.getTasks().size());
    ImportTask task = context.getTasks().get(0);

    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    assertEquals("Invalid srs", "EPSG:4326", resource.getSRS());
    ReferencedEnvelope emptyBounds = new ReferencedEnvelope();
    emptyBounds.setToNull();
    assertTrue("Unexpected bounding box", emptyBounds.equals(resource.getNativeBoundingBox()));
    // add a transform to the chain to limit the description length,
    // otherwise we get a SQL exception thrown
    TransformChain transformChain = task.getTransform();
    transformChain.add(new DescriptionLimitingTransform());
    importer.run(context);
    Exception error = task.getError();
    if (error != null) {
      error.printStackTrace();
      fail(error.getMessage());
    }
    assertFalse("Bounding box not updated", emptyBounds.equals(resource.getNativeBoundingBox()));
    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    assertEquals("Invalid type name", "sample", fti.getName());
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
        fti.getFeatureSource(null, null);
    assertEquals("Unexpected feature count", 20, featureSource.getCount(Query.ALL));
  }
  @Override
  public FeatureIterator getSortedFeatures(
      GeometryDescriptor geom,
      ReferencedEnvelope latLongEnv,
      ReferencedEnvelope nativeEnv,
      Connection cacheConn)
      throws Exception {
    FeatureSource fs = featureType.getFeatureSource(null, null);

    // build the bbox filter
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);

    BBOX filter =
        ff.bbox(
            geom.getLocalName(),
            nativeEnv.getMinX(),
            nativeEnv.getMinY(),
            nativeEnv.getMaxX(),
            nativeEnv.getMaxY(),
            null);

    // build an optimized query (only the necessary attributes)
    Query q = new Query();
    q.setFilter(filter);
    // TODO: enable this when JTS learns how to compute centroids
    // without triggering the
    // generation of Coordinate[] out of the sequences...
    // q.setHints(new Hints(Hints.JTS_COORDINATE_SEQUENCE_FACTORY,
    // PackedCoordinateSequenceFactory.class));
    q.setPropertyNames(new String[] {geom.getLocalName()});

    // return the feature iterator
    return fs.getFeatures(q).features();
  }
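The optimized-query comment above is the point of this snippet: restricting the query to the geometry attribute avoids reading the rest of the schema. A minimal, self-contained sketch of the same pattern follows; the type name "roads" and the geometry attribute "the_geom" are hypothetical.
  void printGeometriesInBox(FeatureSource<SimpleFeatureType, SimpleFeature> featureSource)
      throws IOException {
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
    // only features whose geometry falls inside the given bounding box
    BBOX bbox = ff.bbox("the_geom", -10, -10, 10, 10, "EPSG:4326");
    // fetch only the geometry attribute, nothing else
    Query query = new Query("roads", bbox, new String[] {"the_geom"});
    FeatureIterator<SimpleFeature> it = featureSource.getFeatures(query).features();
    try {
      while (it.hasNext()) {
        System.out.println(it.next().getDefaultGeometry());
      }
    } finally {
      it.close();
    }
  }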
  /**
   * Aggregates the arcs intersecting each grid cell read from the input and writes the result to
   * the given output objects.
   *
   * @param trace
   * @param dataStore
   * @param outputObjects
   * @param total
   * @param errors
   * @param startErrors
   * @param outputName
   * @param aggregationLevel
   * @return the updated error count
   * @throws IOException
   */
  private int aggregateArcsOnGrid(
      int trace,
      DataStore dataStore,
      OutputObject[] outputObjects,
      int total,
      int errors,
      int startErrors,
      String outputName,
      int aggregationLevel)
      throws IOException {
    try {
      String inputGeometryName = getInputGeometryName(dataStore);

      SimpleFeature gridFeature = null;
      while ((gridFeature = readInput()) != null) {

        int id = nextId();
        int idTematico = ((BigDecimal) gridFeature.getAttribute("gid")).intValue();

        Geometry cell = (Geometry) gridFeature.getDefaultGeometry();

        FeatureSource<SimpleFeatureType, SimpleFeature> reader =
            createInputReader(dataStore, Transaction.AUTO_COMMIT, null);

        FeatureIterator<SimpleFeature> iterator =
            reader
                .getFeatures(
                    filterFactory.intersects(
                        filterFactory.property(inputGeometryName), filterFactory.literal(cell)))
                .features();

        try {
          errors =
              aggregateStep(
                  trace,
                  dataStore,
                  outputObjects,
                  total,
                  errors,
                  startErrors,
                  outputName,
                  id,
                  idTematico,
                  iterator,
                  cell,
                  false,
                  true);
        } finally {
          iterator.close();
        }
      }
      importFinished(total, errors - startErrors, "Data imported in " + outputName);

    } finally {
      closeInputReader();
    }

    return errors;
  }
  /**
   * Ensure the Line crosses the other Line at a node.
   *
   * @param layers a HashMap of key="TypeName" value="FeatureSource"
   * @param envelope The bounding box of modified features
   * @param results Storage for the error and warning messages
   * @return true if every intersection with a restricted line happens at a shared node; false if
   *     the validation failed
   * @throws Exception if the features cannot be obtained
   * @see org.geotools.validation.IntegrityValidation#validate(java.util.Map,
   *     com.vividsolutions.jts.geom.Envelope, org.geotools.validation.ValidationResults)
   */
  public boolean validate(Map layers, Envelope envelope, ValidationResults results)
      throws Exception {
    boolean r = true;

    FeatureSource<SimpleFeatureType, SimpleFeature> fsLine =
        (FeatureSource<SimpleFeatureType, SimpleFeature>) layers.get(getLineTypeRef());

    FeatureCollection<SimpleFeatureType, SimpleFeature> fcLine = fsLine.getFeatures();
    FeatureIterator<SimpleFeature> fLine = fcLine.features();

    FeatureSource<SimpleFeatureType, SimpleFeature> fsRLine =
        (FeatureSource<SimpleFeatureType, SimpleFeature>) layers.get(getRestrictedLineTypeRef());

    FeatureCollection<SimpleFeatureType, SimpleFeature> fcRLine = fsRLine.getFeatures();

    while (fLine.hasNext()) {
      SimpleFeature line = fLine.next();
      FeatureIterator<SimpleFeature> fRLine = fcRLine.features();
      Geometry lineGeom = (Geometry) line.getDefaultGeometry();
      if (envelope.contains(lineGeom.getEnvelopeInternal())) {
        // 	check for valid comparison
        if (LineString.class.isAssignableFrom(lineGeom.getClass())) {
          while (fRLine.hasNext()) {
            SimpleFeature rLine = fRLine.next();
            Geometry rLineGeom = (Geometry) rLine.getDefaultGeometry();
            if (envelope.contains(rLineGeom.getEnvelopeInternal())) {
              if (LineString.class.isAssignableFrom(rLineGeom.getClass())) {
                if (lineGeom.intersects(rLineGeom)) {
                  if (!hasPair(
                      ((LineString) lineGeom).getCoordinateSequence(),
                      ((LineString) rLineGeom).getCoordinateSequence())) {
                    results.error(
                        rLine,
                        "Line does not intersect line at node covered by the specified Line.");
                    r = false;
                  }
                } else {
                  results.warning(rLine, "Does not intersect the LineString");
                }
                // do next.
              } else {
                fcRLine.remove(rLine);
                results.warning(
                    rLine, "Invalid type: this feature is not a derivative of a LineString");
              }
            } else {
              fcRLine.remove(rLine);
            }
          }
        } else {
          results.warning(line, "Invalid type: this feature is not a derivative of a LineString");
        }
      }
    }
    return r;
  }
  public Resolution split() throws JSONException {
    JSONObject json = new JSONObject();

    json.put("success", Boolean.FALSE);
    String error = null;

    if (appLayer == null) {
      error = "Invalid parameters";
    } else if (unauthorized) {
      error = "Not authorized";
    } else {
      FeatureSource fs = null;
      try {
        if (this.splitFeatureFID == null) {
          throw new IllegalArgumentException("Split feature ID is null");
        }
        if (this.toSplitWithFeature == null) {
          throw new IllegalArgumentException("Split line is null");
        }

        fs = this.layer.getFeatureType().openGeoToolsFeatureSource();
        if (!(fs instanceof SimpleFeatureStore)) {
          throw new IllegalArgumentException("Feature source does not support editing");
        }
        this.store = (SimpleFeatureStore) fs;

        List<FeatureId> ids = this.splitFeature();

        if (ids.size() < 2) {
          throw new IllegalArgumentException("Split failed, check that geometries overlap");
        }

        json.put("fids", ids);
        json.put("success", Boolean.TRUE);
      } catch (IllegalArgumentException e) {
        log.warn("Split error", e);
        error = e.getLocalizedMessage();
      } catch (Exception e) {
        log.error(String.format("Exception splitting feature %s", this.splitFeatureFID), e);
        error = e.toString();
        if (e.getCause() != null) {
          error += "; cause: " + e.getCause().toString();
        }
      } finally {
        if (fs != null) {
          fs.getDataStore().dispose();
        }
      }
    }

    if (error != null) {
      json.put("error", error);
    }
    return new StreamingResolution("application/json", new StringReader(json.toString()));
  }
Example #7
  @Override
  public ReferencedEnvelope getBounds() {
    try {
      ReferencedEnvelope bounds = featureSource.getBounds();
      if (bounds != null) {
        FeatureType schema = featureSource.getSchema();
        CoordinateReferenceSystem schemaCrs = schema.getCoordinateReferenceSystem();
        CoordinateReferenceSystem boundsCrs = bounds.getCoordinateReferenceSystem();

        if (boundsCrs == null && schemaCrs != null) {
          LOGGER.warning(
              "Bounds crs not defined; assuming bounds from schema are correct for "
                  + featureSource);
          bounds =
              new ReferencedEnvelope(
                  bounds.getMinX(),
                  bounds.getMaxX(),
                  bounds.getMinY(),
                  bounds.getMaxY(),
                  schemaCrs);
        }
        if (boundsCrs != null
            && schemaCrs != null
            && !CRS.equalsIgnoreMetadata(boundsCrs, schemaCrs)) {
          LOGGER.warning(
              "Bounds crs and schema crs are not consistent; forcing the use of the schema crs so they are consistent");
          // bounds = bounds.transform(schemaCrs, true );
          bounds =
              new ReferencedEnvelope(
                  bounds.getMinX(),
                  bounds.getMaxX(),
                  bounds.getMinY(),
                  bounds.getMaxY(),
                  schemaCrs);
        }
        return bounds;
      }
    } catch (IOException e) {
      // feature bounds unavailable
    }

    CoordinateReferenceSystem crs = featureSource.getSchema().getCoordinateReferenceSystem();
    if (crs != null) {
      // returns the envelope based on the CoordinateReferenceSystem
      Envelope envelope = CRS.getEnvelope(crs);
      if (envelope != null) {
        return new ReferencedEnvelope(envelope); // nice!
      } else {
        return new ReferencedEnvelope(crs); // empty bounds
      }
    } else {
      return null; // unknown
    }
  }
Example #8
  public void testImportCSVIndirect() throws Exception {
    File dir = unpack("csv/locations.zip");
    String wsName = getCatalog().getDefaultWorkspace().getName();

    DataStoreInfo h2DataStore = createH2DataStore(wsName, "csvindirecttest");
    SpatialFile importData = new SpatialFile(new File(dir, "locations.csv"));

    ImportContext context = importer.createContext(importData, h2DataStore);
    assertEquals(1, context.getTasks().size());
    ImportTask task = context.getTasks().get(0);

    TransformChain transformChain = task.getTransform();
    transformChain.add(new AttributesToPointGeometryTransform("LAT", "LON"));
    assertEquals(ImportTask.State.NO_CRS, task.getState());

    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    resource.setSRS("EPSG:4326");

    assertTrue("Item not ready", importer.prep(task));
    assertEquals(ImportTask.State.READY, task.getState());

    context.updated();
    assertEquals(ImportContext.State.PENDING, context.getState());
    importer.run(context);

    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    SimpleFeatureType featureType = (SimpleFeatureType) fti.getFeatureType();
    GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
    assertNotNull("Expecting geometry", geometryDescriptor);
    assertEquals("Invalid geometry name", "location", geometryDescriptor.getLocalName());
    assertEquals(3, featureType.getAttributeCount());
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
        fti.getFeatureSource(null, null);
    FeatureCollection<? extends FeatureType, ? extends Feature> features =
        featureSource.getFeatures();
    assertEquals(9, features.size());
    FeatureIterator<? extends Feature> featureIterator = features.features();
    assertTrue("Expected features", featureIterator.hasNext());
    SimpleFeature feature = (SimpleFeature) featureIterator.next();
    assertNotNull(feature);
    assertEquals("Invalid city attribute", "Trento", feature.getAttribute("CITY"));
    assertEquals("Invalid number attribute", 140, feature.getAttribute("NUMBER"));
    Object geomAttribute = feature.getAttribute("location");
    assertNotNull("Expected geometry", geomAttribute);
    Point point = (Point) geomAttribute;
    Coordinate coordinate = point.getCoordinate();
    assertEquals("Invalid x coordinate", 11.12, coordinate.x, 0.1);
    assertEquals("Invalid y coordinate", 46.07, coordinate.y, 0.1);
    featureIterator.close();
  }
 /**
  * Checks whether the given processing is allowed; the lookup goes through the catalog so that
  * security filtering can kick in.
  *
  * @param dataStore the destination data store
  * @param processing the processing identifier to check
  * @throws IOException if the feature source cannot be accessed
  */
 private void checkProcessingAllowed(JDBCDataStore dataStore, int processing) throws IOException {
   // access through catalog to let the security jump in
   FeatureTypeInfo ft =
       catalog.getResourceByName("destination", "siig_mtd_d_elaborazione", FeatureTypeInfo.class);
   FeatureSource source = ft.getFeatureSource(null, null);
   FeatureCollection fc =
       source.getFeatures(ff2.equals(ff2.property("id_elaborazione"), ff2.literal(processing)));
   // check if the given processing is allowed (if not, it is filtered out from the results,
   // so the size should be 0)
   if (fc.size() != 1) {
     throw new ProcessException("Operation not allowed");
   }
 }
  private OpenBitSet applySpatialFilter(
      Set<FeatureId> matches, Multimap<FeatureId, Integer> docIndexLookup, OpenBitSet bits)
      throws IOException {

    JeevesJCS jcs = getJCSCache();
    processCachedFeatures(jcs, matches, docIndexLookup, bits);

    while (!matches.isEmpty()) {
      Id fidFilter;
      if (matches.size() > MAX_FIDS_PER_QUERY) {
        FeatureId[] subset = new FeatureId[MAX_FIDS_PER_QUERY];
        int i = 0;
        Iterator<FeatureId> iter = matches.iterator();
        while (iter.hasNext() && i < MAX_FIDS_PER_QUERY) {
          subset[i] = iter.next();
          iter.remove();
          i++;
        }
        fidFilter = _filterFactory.id(subset);
      } else {
        fidFilter = _filterFactory.id(matches);
        matches = Collections.emptySet();
      }

      FeatureSource<SimpleFeatureType, SimpleFeature> _featureSource = sourceAccessor.one();
      String ftn = _featureSource.getSchema().getName().getLocalPart();
      String[] geomAtt = {_featureSource.getSchema().getGeometryDescriptor().getLocalName()};
      FeatureCollection<SimpleFeatureType, SimpleFeature> features =
          _featureSource.getFeatures(new org.geotools.data.Query(ftn, fidFilter, geomAtt));
      FeatureIterator<SimpleFeature> iterator = features.features();

      try {
        while (iterator.hasNext()) {
          SimpleFeature feature = iterator.next();
          FeatureId featureId = feature.getIdentifier();
          jcs.put(featureId.getID(), feature.getDefaultGeometry());
          if (evaluateFeature(feature)) {
            for (int doc : docIndexLookup.get(featureId)) {
              bits.set(doc);
            }
          }
        }
      } catch (CacheException e) {
        throw new Error(e);
      } finally {
        iterator.close();
      }
    }
    return bits;
  }
Example #11
 /** Used to connect/disconnect a FeatureListener if any map layer listeners are registered. */
 protected synchronized void connectDataListener(boolean listen) {
   if (sourceListener == null) {
     sourceListener =
         new FeatureListener() {
           public void changed(FeatureEvent featureEvent) {
             fireMapLayerListenerLayerChanged(MapLayerEvent.DATA_CHANGED);
           }
         };
   }
   if (listen) {
     featureSource.addFeatureListener(sourceListener);
   } else {
     featureSource.removeFeatureListener(sourceListener);
   }
 }
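The listener registration itself is plain FeatureSource API. A minimal sketch, assuming a featureSource reference is in scope, that registers a logging listener and later unregisters it:
 void listenForDataChanges(FeatureSource<SimpleFeatureType, SimpleFeature> featureSource) {
   FeatureListener logListener =
       new FeatureListener() {
         public void changed(FeatureEvent featureEvent) {
           // react to data changes on the underlying FeatureSource
           System.out.println("feature data changed");
         }
       };
   featureSource.addFeatureListener(logListener);
   // ... later, to stop receiving notifications:
   featureSource.removeFeatureListener(logListener);
 }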
  @Test
  @Ignore
  // this test is skipped: it checks something that, formally, is what we should expect,
  // but for the moment it badly breaks the shapefile renderer optimizations;
  // until we get rid of the latter, let's be a little lax on this one...
  public void testFeatureSource() throws Exception {
    File file = copyShapefiles("shapes/archsites.shp");
    tempDir = file.getParentFile();

    DataStore dds = new DirectoryDataStore(tempDir, getFileStoreFactory());
    FeatureSource fs = dds.getFeatureSource("archsites");
    assertNotNull(fs);
    assertSame(dds, fs.getDataStore());
    dds.dispose();
  }
  public void update(Object updatedObject) {
    if (updatedObject instanceof List) {
      String text = null;
      List layers = (List) updatedObject;
      for (Object layer : layers) {
        if (layer instanceof JGrassMapGeoResource) {
          JGrassMapGeoResource rasterMapResource = (JGrassMapGeoResource) layer;
          try {
            text = rasterMapResource.getInfo(null).getName();

            JGrassRegion fileWindow = rasterMapResource.getFileWindow();
            if (fileWindow != null) {
              setWidgetsToWindow(fileWindow);
            }
          } catch (IOException e1) {
            return;
          }
        } else if (layer instanceof DataStore || layer instanceof UDIGFeatureStore) {
          try {
            DataStore store = ((DataStore) layer);
            FeatureSource featureStore = store.getFeatureSource(store.getTypeNames()[0]);
            Envelope envelope = featureStore.getBounds();

            ActiveRegionStyle style = getActiveRegionStyle();
            JGrassRegion activeWindow =
                new JGrassRegion(
                    style.west, style.east, style.south, style.north, style.rows, style.cols);
            JGrassRegion newWindow =
                JGrassRegion.adaptActiveRegionToEnvelope(envelope, activeWindow);
            northText.setText(String.valueOf(newWindow.getNorth()));
            southText.setText(String.valueOf(newWindow.getSouth()));
            eastText.setText(String.valueOf(newWindow.getEast()));
            westText.setText(String.valueOf(newWindow.getWest()));
            textModified(bound_type);
          } catch (IOException e1) {
            e1.printStackTrace();
            return;
          }

        } else {
          return;
        }
      }
      if (text == null) {
        return;
      }
    }
  }
Example #14
  public static List<SimpleFeature> fromShapefile(String shapePath) throws IOException {

    FileDataStore store = FileDataStoreFinder.getDataStore(new File(shapePath));
    FeatureSource<SimpleFeatureType, SimpleFeature> featureSource = store.getFeatureSource();
    FeatureCollection<SimpleFeatureType, SimpleFeature> featureCollection =
        featureSource.getFeatures();

    List<SimpleFeature> featuresList = new ArrayList<SimpleFeature>();
    FeatureIterator<SimpleFeature> featureIterator = featureCollection.features();
    while (featureIterator.hasNext()) {
      SimpleFeature feature = featureIterator.next();
      featuresList.add(feature);
    }
    featureIterator.close();

    return featuresList;
  }
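The snippet above never releases the store; a variant sketch (not from the original source) that disposes the FileDataStore once the features have been copied out:
  public static List<SimpleFeature> fromShapefileAndDispose(String shapePath) throws IOException {
    FileDataStore store = FileDataStoreFinder.getDataStore(new File(shapePath));
    List<SimpleFeature> featuresList = new ArrayList<SimpleFeature>();
    try {
      FeatureIterator<SimpleFeature> featureIterator =
          store.getFeatureSource().getFeatures().features();
      try {
        while (featureIterator.hasNext()) {
          featuresList.add(featureIterator.next());
        }
      } finally {
        featureIterator.close();
      }
    } finally {
      // release the file handles held by the shapefile store
      store.dispose();
    }
    return featuresList;
  }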
Example #15
  public Style style(IProgressMonitor monitor) {
    URL url = parent.getIdentifier();
    File file = URLUtils.urlToFile(url);
    String shp = file.getAbsolutePath();

    StyleFactory styleFactory = CommonFactoryFinder.getStyleFactory(GeoTools.getDefaultHints());

    // strip off the extension and check for sld
    String sld = shp.substring(0, shp.length() - 4) + ".sld"; // $NON-NLS-1$
    File f = new File(sld);
    if (!f.exists()) {
      // try upper case
      sld = shp.substring(0, shp.length() - 4) + ".SLD"; // $NON-NLS-1$
      f = new File(sld);
    }

    if (f.exists()) {
      // parse it up
      SLDParser parser = new SLDParser(styleFactory);
      try {
        parser.setInput(f);
      } catch (FileNotFoundException e) {
        return null; // well that is unexpected since f.exists()
      }
      Style[] styles = parser.readXML();

      FeatureSource<SimpleFeatureType, SimpleFeature> source;
      try {
        source = featureSource(null);
      } catch (IOException e) {
        return null; // does not look like there is anything in the shapefile
      }
      SimpleFeatureType featureType = source.getSchema();
      // put the first one on
      if (styles != null && styles.length > 0) {
        Style style = SLD.matchingStyle(styles, featureType);
        if (style == null) {
          style = styles[0];
        }

        makeGraphicsAbsolute(file, style);
        return style;
      }
    }
    return null; // well nothing worked out; make your own style
  }
Example #16
  /**
   * Returns the collection of all values of the dimension attribute, possibly sorted if the
   * native capabilities allow for it.
   *
   * @param typeInfo the feature type holding the dimension
   * @param dimension the dimension whose values are listed
   * @return the collection of dimension attribute values
   * @throws IOException if the feature source cannot be obtained or queried
   */
  FeatureCollection getDimensionCollection(FeatureTypeInfo typeInfo, DimensionInfo dimension)
      throws IOException {
    // grab the feature source
    FeatureSource source = null;
    try {
      source = typeInfo.getFeatureSource(null, GeoTools.getDefaultHints());
    } catch (IOException e) {
      throw new ServiceException(
          "Could not get the feauture source to list time info for layer "
              + typeInfo.getPrefixedName(),
          e);
    }

    // build query to grab the dimension values
    final Query dimQuery = new Query(source.getSchema().getName().getLocalPart());
    dimQuery.setPropertyNames(Arrays.asList(dimension.getAttribute()));
    return source.getFeatures(dimQuery);
  }
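The Javadoc above notes the values come back sorted only if the native capabilities allow it; a hypothetical variant (not from the original source) that explicitly asks the store to sort when its query capabilities support it:
  FeatureCollection getSortedDimensionCollection(FeatureSource source, String attribute)
      throws IOException {
    Query dimQuery = new Query(source.getSchema().getName().getLocalPart());
    dimQuery.setPropertyNames(Arrays.asList(attribute));
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
    SortBy sortBy = ff.sort(attribute, SortOrder.ASCENDING);
    if (source.getQueryCapabilities().supportsSorting(new SortBy[] {sortBy})) {
      // the store can sort natively, let it do the work
      dimQuery.setSortBy(new SortBy[] {sortBy});
    }
    return source.getFeatures(dimQuery);
  }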
  protected org.opengis.filter.Filter createFilter(
      FeatureSource<SimpleFeatureType, SimpleFeature> source) {
    String geomAttName = source.getSchema().getGeometryDescriptor().getLocalName();
    PropertyName geomPropertyName = _filterFactory.property(geomAttName);

    Literal geomExpression = _filterFactory.literal(_geom);
    org.opengis.filter.Filter filter =
        createGeomFilter(_filterFactory, geomPropertyName, geomExpression);
    return filter;
  }
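createGeomFilter is a helper of the surrounding class; a minimal illustrative sketch of building such a spatial predicate directly, here with an intersects check:
  protected org.opengis.filter.Filter createIntersectsFilter(
      FeatureSource<SimpleFeatureType, SimpleFeature> source, Geometry geometry) {
    FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
    String geomAttName = source.getSchema().getGeometryDescriptor().getLocalName();
    // the geometry attribute must intersect the given geometry
    return ff.intersects(ff.property(geomAttName), ff.literal(geometry));
  }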
  public void testRandomProcess() throws Exception {
    File f = TestData.file(this, "all_data.shp");
    // System.out.println(f + " " + f.exists());
    URL url = DataUtilities.fileToURL(f);
    DataStoreFactorySpi dataStoreFactory = new ShapefileDataStoreFactory();

    Map<String, Serializable> params2 = new HashMap<String, Serializable>();
    params2.put("url", url);
    params2.put("create spatial index", Boolean.TRUE);
    ShapefileDataStore store = (ShapefileDataStore) dataStoreFactory.createNewDataStore(params2);
    // ShapefileDataStore store = new ShapefileDataStore(url);
    assertNotNull(store);
    FeatureSource featureSource = store.getFeatureSource();
    final FeatureCollection features = featureSource.getFeatures();

    Map<String, Object> params = new HashMap<String, Object>();

    params.put(ClusterMethodFactory.NAME.key, "random");
    params.put(ClusterMethodFactory.POPULATION.key, features);
    params.put(ClusterMethodFactory.POPATTRIBUTE.key, "pop");
    params.put(ClusterMethodFactory.CANCER.key, features);
    params.put(ClusterMethodFactory.CANATTRIBUTE.key, "cases");
    params.put(ClusterMethodFactory.MINRAD.key, 1000.0);
    params.put(ClusterMethodFactory.MAXRAD.key, 5000.0);
    params.put(ClusterMethodFactory.NCIRCLES.key, 500.0);
    params.put(ClusterMethodFactory.TESTNAME.key, "poisson");
    ClusterMethodFactory factory = new ClusterMethodFactory();
    Process process = factory.create(params);
    assertNotNull(process);
    long start = System.currentTimeMillis();
    Map<String, Object> results = process.execute(params, new ClusterMonitor());
    long end = System.currentTimeMillis();
    System.out.println("process took " + ((end - start) / 1000) + " seconds");
    GridCoverage2D grid = (GridCoverage2D) results.get(ClusterMethodFactory.RESULT.key);
    String basename = f.toString();
    basename = basename.substring(0, basename.length() - 4);
    String filename = basename + "_rand.tiff";
    Utilities.writeGrid(filename, grid);

    FeatureCollection outfeatures =
        (FeatureCollection) results.get(ClusterMethodFactory.CIRCLES.key);
    Utilities.writeCircles(basename + "_rand.shp", outfeatures);
  }
Example #19
 @Override
 public void dispose() {
   if (featureSource != null) {
     if (sourceListener != null) {
       featureSource.removeFeatureListener(sourceListener);
     }
     featureSource = null;
   }
   style = null;
   query = null;
   super.dispose();
 }
 SimpleFeature getSingleFeature(FeatureSource<SimpleFeatureType, SimpleFeature> fs, Filter f)
     throws IOException {
   FeatureIterator<SimpleFeature> fi = null;
   try {
     fi = fs.getFeatures(f).features();
     return fi.next();
   } finally {
     if (fi != null) {
       fi.close();
     }
   }
 }
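FeatureIterator implements Closeable in recent GeoTools versions, so the same close-on-exit guard can be written with try-with-resources; an equivalent sketch of the helper above:
 SimpleFeature getFirstFeature(FeatureSource<SimpleFeatureType, SimpleFeature> fs, Filter f)
     throws IOException {
   try (FeatureIterator<SimpleFeature> fi = fs.getFeatures(f).features()) {
     // the iterator is closed even if next() throws
     return fi.next();
   }
 }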
Example #21
  @Override
  public Layer createLayer(LayerContext ctx, PropertySet configuration) {
    CoordinateReferenceSystem targetCrs = null;
    if (ctx != null) {
      targetCrs = (CoordinateReferenceSystem) ctx.getCoordinateReferenceSystem();
    }
    FeatureCollection<SimpleFeatureType, SimpleFeature> fc;
    fc =
        (FeatureCollection<SimpleFeatureType, SimpleFeature>)
            configuration.getValue(FeatureLayerType.PROPERTY_NAME_FEATURE_COLLECTION);
    if (fc == null) {
      try {
        final URL url =
            (URL) configuration.getValue(FeatureLayerType.PROPERTY_NAME_FEATURE_COLLECTION_URL);
        FeatureSource<SimpleFeatureType, SimpleFeature> featureSource =
            FeatureUtils.getFeatureSource(url);
        fc = featureSource.getFeatures();
      } catch (IOException e) {
        throw new IllegalArgumentException(e);
      }
    }
    final CoordinateReferenceSystem featureCrs =
        (CoordinateReferenceSystem)
            configuration.getValue(FeatureLayerType.PROPERTY_NAME_FEATURE_COLLECTION_CRS);
    final Geometry clipGeometry =
        (Geometry)
            configuration.getValue(FeatureLayerType.PROPERTY_NAME_FEATURE_COLLECTION_CLIP_GEOMETRY);

    fc =
        FeatureUtils.clipCollection(
            fc,
            featureCrs,
            clipGeometry,
            DefaultGeographicCRS.WGS84,
            null,
            targetCrs,
            ProgressMonitor.NULL);

    return new FeatureLayer(this, fc, configuration);
  }
Example #22
 private Integer collectionSize(FeatureSource featureSource) {
   final Integer collectionSize;
   {
     int fastCount;
     try {
       fastCount = featureSource.getCount(Query.ALL);
     } catch (IOException e) {
       throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_GET_FEATURES);
     }
     collectionSize = -1 == fastCount ? null : Integer.valueOf(fastCount);
   }
   return collectionSize;
 }
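getCount(Query.ALL) returns -1 when the store has no fast way to count, which is why the snippet above maps -1 to null. A possible fallback, sketched here and not part of the original source, is to load the collection and count it; slower, but always available:
 private int countFeatures(FeatureSource featureSource) throws IOException {
   int fastCount = featureSource.getCount(Query.ALL);
   if (fastCount != -1) {
     return fastCount;
   }
   // no fast count available: iterate the collection to count it
   return featureSource.getFeatures(Query.ALL).size();
 }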
Example #23
  public static List<SimpleFeature> fromFeatureLayer(String mapName) throws IOException {

    ILayer selectedLayer = ApplicationGIS.getActiveMap().getEditManager().getSelectedLayer();
    FeatureSource<SimpleFeatureType, SimpleFeature> featureSource =
        selectedLayer.getResource(FeatureSource.class, new NullProgressMonitor());
    FeatureCollection<SimpleFeatureType, SimpleFeature> featureCollection = null;
    Filter filter = selectedLayer.getFilter();
    if (filter.equals(Filter.EXCLUDE)) {
      featureCollection = featureSource.getFeatures();
    } else {
      featureCollection = featureSource.getFeatures(filter);
    }

    List<SimpleFeature> featuresList = new ArrayList<SimpleFeature>();
    FeatureIterator<SimpleFeature> featureIterator = featureCollection.features();
    while (featureIterator.hasNext()) {
      SimpleFeature feature = featureIterator.next();
      featuresList.add(feature);
    }
    featureCollection.close(featureIterator);

    return featuresList;
  }
Example #24
  public void testImportIntoDatabaseUpdateModes() throws Exception {
    testImportIntoDatabase();

    DataStoreInfo ds = getCatalog().getDataStoreByName("spearfish");
    assertNotNull(ds);

    File dir = tmpDir();
    unpack("shape/archsites_epsg_prj.zip", dir);
    unpack("shape/bugsites_esri_prj.tar.gz", dir);

    FeatureSource<? extends FeatureType, ? extends Feature> fs =
        getCatalog().getFeatureTypeByName("archsites").getFeatureSource(null, null);
    int archsitesCount = fs.getCount(Query.ALL);
    fs = getCatalog().getFeatureTypeByName("bugsites").getFeatureSource(null, null);
    int bugsitesCount = fs.getCount(Query.ALL);

    ImportContext context = importer.createContext(new Directory(dir), ds);
    context.getTasks().get(0).setUpdateMode(UpdateMode.REPLACE);
    context.getTasks().get(1).setUpdateMode(UpdateMode.APPEND);

    importer.run(context);

    fs = getCatalog().getFeatureTypeByName("archsites").getFeatureSource(null, null);
    int archsitesCount2 = fs.getCount(Query.ALL);
    fs = getCatalog().getFeatureTypeByName("bugsites").getFeatureSource(null, null);
    int bugsitesCount2 = fs.getCount(Query.ALL);

    // tasks might not be in the same order
    if (context.getTasks().get(0).getLayer().getName().equals("archsites")) {
      assertEquals(archsitesCount, archsitesCount2);
      assertEquals(bugsitesCount * 2, bugsitesCount2);
    } else {
      assertEquals(archsitesCount * 2, archsitesCount2);
      assertEquals(bugsitesCount, bugsitesCount2);
    }
  }
Example #25
  /** Retrieve information about the feature geometry */
  private void setGeometry() {
    GeometryDescriptor geomDesc = featureSource.getSchema().getGeometryDescriptor();
    geometryAttributeName = geomDesc.getLocalName();

    Class<?> clazz = geomDesc.getType().getBinding();

    if (Polygon.class.isAssignableFrom(clazz) || MultiPolygon.class.isAssignableFrom(clazz)) {
      geometryType = GeomType.POLYGON;

    } else if (LineString.class.isAssignableFrom(clazz)
        || MultiLineString.class.isAssignableFrom(clazz)) {

      geometryType = GeomType.LINE;

    } else {
      geometryType = GeomType.POINT;
    }
  }
  @Override
  protected void setUpInternal(SystemTestData data) throws Exception {
    // run all the tests against a store that can do native paging (h2) and one that
    // can't (property)
    Catalog cat = getCatalog();
    DataStoreInfo ds = cat.getFactory().createDataStore();
    ds.setName("foo");
    ds.setWorkspace(cat.getDefaultWorkspace());

    Map params = ds.getConnectionParameters();
    params.put("dbtype", "h2");
    params.put("database", getTestData().getDataDirectoryRoot().getAbsolutePath());
    cat.add(ds);

    FeatureSource fs1 = getFeatureSource(SystemTestData.FIFTEEN);
    FeatureSource fs2 = getFeatureSource(SystemTestData.SEVEN);

    DataStore store = (DataStore) ds.getDataStore(null);
    SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();

    tb.init((SimpleFeatureType) fs1.getSchema());
    tb.add("num", Integer.class);
    tb.remove("boundedBy");
    store.createSchema(tb.buildFeatureType());

    tb.init((SimpleFeatureType) fs2.getSchema());
    tb.add("num", Integer.class);
    tb.remove("boundedBy");
    store.createSchema(tb.buildFeatureType());

    CatalogBuilder cb = new CatalogBuilder(cat);
    cb.setStore(ds);

    FeatureStore fs = (FeatureStore) store.getFeatureSource("Fifteen");
    addFeatures(fs, fs1.getFeatures());

    FeatureTypeInfo ft = cb.buildFeatureType(fs);
    cat.add(ft);

    fs = (FeatureStore) store.getFeatureSource("Seven");
    addFeatures(fs, fs2.getFeatures());

    ft = cb.buildFeatureType(fs);
    cat.add(ft);
  }
  @Ignore
  @Test
  public void testSaveAndLoad() throws Exception {
    EList list = project.eResource().getResourceSet().getResources();

    for (Iterator iter = list.iterator(); iter.hasNext(); ) {
      Resource element = (Resource) iter.next();
      try {
        element.save(null);
      } catch (Exception e) {
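        // ignore: if saving one resource fails, continue with the others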
      }
      if (!element.getContents().contains(ProjectPlugin.getPlugin().getProjectRegistry()))
        element.unload();
    }

    ResourceSet set = new ResourceSetImpl();
    Project project =
        (Project)
            set.getResource(URI.createURI("file://" + file.getAbsolutePath()), true)
                .getAllContents()
                .next(); //$NON-NLS-1$
    assertFalse(project.eIsProxy());
    assertNotNull(project);
    int maps = 0;
    boolean foundFirstMap = false;
    boolean foundSecondMap = false;

    List resources = project.getElements();
    for (Iterator iter = resources.iterator(); iter.hasNext(); ) {
      Map map = (Map) iter.next();

      assertFalse(map.eIsProxy());
      assertEquals(1, map.getLayersInternal().size());
      assertNotNull(map.getLayersInternal().get(0).getGeoResources().get(0));
      assertNotNull(
          map.getLayersInternal()
              .get(0)
              .getResource(FeatureSource.class, new NullProgressMonitor()));

      if (map.getName().equals(firstMapName)) {
        foundFirstMap = true;
        assertEquals(firstMapLayerName, map.getLayersInternal().get(0).getName());
        FeatureSource<SimpleFeatureType, SimpleFeature> source =
            map.getLayersInternal().get(0).getResource(FeatureSource.class, null);
        assertEquals(4, source.getCount(Query.ALL));
        assertEquals(firstMapLayerName, map.getLayersInternal().get(0).getName());
      }
      if (map.getName().equals(secondMapName)) {
        foundSecondMap = true;
        assertEquals(secondMapLayerName, map.getLayersInternal().get(0).getName());
        FeatureSource<SimpleFeatureType, SimpleFeature> source =
            map.getLayersInternal().get(0).getResource(FeatureSource.class, null);
        assertEquals(6, source.getCount(Query.ALL));
        assertEquals(secondMapLayerName, map.getLayersInternal().get(0).getName());
      }
      maps++;
    }
    assertEquals(2, maps);
    assertTrue(foundFirstMap);
    assertTrue(foundSecondMap);
  }
  /**
   * Loads all the features from the WFS and then prepares the factories needed to build the
   * different types of classifications and displays.
   *
   * @throws DocServiceException if the classification cannot be built
   */
  private void doClassification() throws DocServiceException {
    try {

      // connect to the remote WFS
      WFSContentDataStore wfs = connectToWFS(_command.getWFSUrl());

      // check if property name exists
      SimpleFeatureType ft = wfs.getSchema(_wfsngTypeName);
      int index = ft.indexOf(_command.getPropertyName());
      if (index == -1) {
        throw new DocServiceException(
            _command.getPropertyName() + " is not an attribute of " + _command.getFeatureTypeName(),
            HttpServletResponse.SC_BAD_REQUEST);
      }

      // Load all the features
      FeatureSource<SimpleFeatureType, SimpleFeature> source = wfs.getFeatureSource(_wfsngTypeName);
      FeatureCollection<SimpleFeatureType, SimpleFeature> featuresCollection = source.getFeatures();

      // We need a display (Symbolizer) factory and a value (Filter) factory to generate an SLD file
      I_SymbolizerFactory symbolizerFact = null; // create symbols
      I_FilterFactory filterFact = null; // create filters

      // execute different type of classification given the type requested by user
      if (_command.getClassifType() == E_ClassifType.CHOROPLETHS
          || _command.getClassifType() == E_ClassifType.PROP_SYMBOLS) {

        // Classification on continuous values. Sorting is needed to classify:
        // Double values are mandatory (for now)

        if (getDataType(wfs) == String.class) {
          // choropleths and prop symbols use quantile classification
          // therefore classifying on a string type has no purpose
          throw new DocServiceException(
              "Classification on continous values ("
                  + _command.getClassifType()
                  + ").\n"
                  + "Attribute "
                  + _command.getPropertyName()
                  + " is string type."
                  + " Therefore no classification on contiuous values can be done."
                  + " It needs be a meaningful comparable type (numerical, date...)."
                  + " Use unique values classification instead.",
              HttpServletResponse.SC_BAD_REQUEST);
        } else if ((getDataType(wfs) != Double.class)
            && (getDataType(wfs) != Float.class)
            && (getDataType(wfs) != Integer.class)
            && (getDataType(wfs) != Long.class)
            && (getDataType(wfs) != Short.class)) {
          // for now, only double, float, integer, long and short types are supported
          // FIXME deal with others numerical types, dates...
          // they all must be comparable type as sorting is required for classification
          throw new DocServiceException(
              "Classification on " + getDataType(wfs).getName() + " type is not supported.",
              HttpServletResponse.SC_NOT_IMPLEMENTED);
        }

        // get values to classify
        ArrayList<Double> values =
            getDoubleValues(featuresCollection.features(), _command.getPropertyName());
        filterFact =
            new ContinuousFilterFactory(
                values, _command.getClassCount(), _command.getPropertyName());

        if (_command.getClassifType() == E_ClassifType.CHOROPLETHS) {
          switch (_command.getSymbolType()) {
            case POLYGON:
              symbolizerFact =
                  new PolygonSymbolizerFactory(
                      _command.getClassCount(), _command.getFirstColor(), _command.getLastColor());
              break;
            case LINE:
              symbolizerFact =
                  new LineSymbolizerFactory(
                      _command.getClassCount(), _command.getFirstColor(), _command.getLastColor());
              break;
            case POINT:
              symbolizerFact =
                  new PointSymbolizerFactory(
                      _command.getClassCount(), _command.getFirstColor(), _command.getLastColor());
              break;
            default:
              throw new DocServiceException(
                  "Choropleths classification on symbol type: "
                      + _command.getSymbolType()
                      + " is not supported.",
                  HttpServletResponse.SC_BAD_REQUEST);
          }
        } else if (_command.getClassifType() == E_ClassifType.PROP_SYMBOLS) {
          switch (_command.getSymbolType()) {
            case LINE:
              symbolizerFact =
                  new LineSymbolizerFactory(
                      _command.getClassCount(), _command.getMinSize(), _command.getMaxSize());
              // customizing is possible
              // symbolizerFact.setColor(Color.BLUE);
              break;
            case POINT:
              symbolizerFact =
                  new PointSymbolizerFactory(
                      _command.getClassCount(), _command.getMinSize(), _command.getMaxSize());
              // customizing is possible
              // symbolizerFact.setColor(Color.BLUE);
              // symbolizerFact.setSymbol(StyleBuilder.MARK_CROSS);
              break;
            default:
              throw new DocServiceException(
                  "Proportional symbols classification on symbol type: "
                      + _command.getSymbolType()
                      + " is not supported.",
                  HttpServletResponse.SC_BAD_REQUEST);
          }
        }
      } else if (_command.getClassifType() == E_ClassifType.UNIQUE_VALUES) {

        // no need to classify on unique values; they can be kept as Strings.
        Set<String> values =
            getUniqueStringValues(featuresCollection.features(), _command.getPropertyName());
        filterFact = new DiscreteFilterFactory(values, _command.getPropertyName());

        switch (_command.getSymbolType()) {
          case POLYGON:
            symbolizerFact = new PolygonSymbolizerFactory(_command.getPaletteID(), values.size());
            break;
          case LINE:
            symbolizerFact = new LineSymbolizerFactory(_command.getPaletteID(), values.size());
            break;
          case POINT:
            symbolizerFact = new PointSymbolizerFactory(_command.getPaletteID(), values.size());
            break;
          default:
            throw new DocServiceException(
                "Unique values classification on symbol type: "
                    + _command.getSymbolType()
                    + " is not supported.",
                HttpServletResponse.SC_BAD_REQUEST);
        }
      } else {
        throw new DocServiceException(
            "Unknown classification type: " + _command.getClassifType(),
            HttpServletResponse.SC_BAD_REQUEST);
      }

      assert (symbolizerFact != null);
      assert (filterFact != null);

      // With those 2 factories a FeatureTypeStyle can be created
      FeatureTypeStyle fts = createFeatureTypeStyle(filterFact, symbolizerFact);

      // Use FeatureTypeStyle to generate a complete SLD object
      _sld = createSLD(fts);
    } catch (IOException e) {
      e.printStackTrace(); // can happen when communicating with the WFS
    }
  }
Example #29
  private List<ZGroupLayerPainter> buildLayerPainters(
      Graphics2D graphics,
      StreamingRenderer renderer,
      String layerId,
      ProgressListener cancellationListener)
      throws IOException, FactoryException, NoninvertibleTransformException, SchemaException,
          TransformException {
    List<ZGroupLayerPainter> painters = new ArrayList<>();
    boolean closePainters = true;
    try {
      for (Layer layer : layers) {
        // get the LiteFeatureTypeStyle for this layer
        final FeatureSource featureSource = layer.getFeatureSource();
        if (featureSource == null) {
          throw new IllegalArgumentException("The layer does not contain a feature source");
        }
        final FeatureType schema = featureSource.getSchema();

        final ArrayList<LiteFeatureTypeStyle> lfts =
            renderer.createLiteFeatureTypeStyles(layer, graphics, false);
        if (lfts.isEmpty()) {
          continue;
        } else {
          if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.fine("Processing " + lfts.size() + " stylers for " + schema.getName());
          }
        }

        // get the feature iterator we need
        FeatureCollection features = renderer.getFeatures(layer, schema, lfts);
        // While we could use a non mark feature iterator for single fts layers,
        // that would cause multiple connections to be open at the same time,
        // which in turn could cause deadlocks against connection pools, so we
        // are going to build a MarkFeatureIterator regardless
        // TODO: we could optimize down to simple streaming painting if we end up
        // with a single painter with a single fts (due to scale dependencies)
        // but we'd have to delay opening the MarkFeatureIterator to recognize the
        // situation
        int maxFeatures = SortedFeatureReader.getMaxFeaturesInMemory(layer.getQuery());
        MarkFeatureIterator fi =
            MarkFeatureIterator.create(features, maxFeatures, cancellationListener);
        if (fi.hasNext()) {
          ZGroupLayerPainter painter = new ZGroupLayerPainter(fi, lfts, renderer, layerId);
          painters.add(painter);
        } else {
          fi.close();
        }
      }

      // got to the end cleanly, no need to close the painters accumulated so far
      closePainters = false;
    } finally {
      if (closePainters) {
        for (ZGroupLayerPainter painter : painters) {
          try {
            painter.close();
          } catch (Exception e) {
            LOGGER.log(Level.FINE, "Failed to close cleanly layer painter " + painter, e);
          }
        }
      }
    }

    validateSortBy(painters);

    return painters;
  }
Example #30
 /**
  * Get the features as JSONArray with the given params
  *
  * @param al The application layer (if there is an application layer)
  * @param ft The featuretype that must be used to get the features
  * @param fs The featureSource
  * @param q The query
  * @param sort The attribute name that is used to sort
  * @param dir Sort direction (DESC or ASC)
  * @return JSONArray with features.
  * @throws IOException if any
  * @throws JSONException if transforming to json fails
  * @throws Exception if any
  */
 public JSONArray getJSONFeatures(
     ApplicationLayer al, SimpleFeatureType ft, FeatureSource fs, Query q, String sort, String dir)
     throws IOException, JSONException, Exception {
   Map<String, String> attributeAliases = new HashMap<String, String>();
   if (!edit) {
     for (AttributeDescriptor ad : ft.getAttributes()) {
       if (ad.getAlias() != null) {
         attributeAliases.put(ad.getName(), ad.getAlias());
       }
     }
   }
   List<String> propertyNames;
   if (al != null) {
     propertyNames = this.setPropertyNames(al, q, ft, edit);
   } else {
     propertyNames = new ArrayList<String>();
     for (AttributeDescriptor ad : ft.getAttributes()) {
       propertyNames.add(ad.getName());
     }
   }
   if (sort != null) {
     setSortBy(q, propertyNames, sort, dir);
   }
   /* Use the first property as sort field, otherwise GeoTools will give an error when querying
    * a JDBC featureType without a primary key.
    */
   else if (fs instanceof org.geotools.jdbc.JDBCFeatureSource && !propertyNames.isEmpty()) {
     setSortBy(q, propertyNames.get(0), dir);
   }
   Integer start = q.getStartIndex();
   if (start == null) {
     start = 0;
   }
   boolean offsetSupported = fs.getQueryCapabilities().isOffsetSupported();
   // if offset is not supported, get more features (start + the wanted features)
   if (!offsetSupported && q.getMaxFeatures() < MAX_FEATURES) {
     q.setMaxFeatures(q.getMaxFeatures() + start);
   }
   FeatureIterator<SimpleFeature> it = null;
   JSONArray features = new JSONArray();
   try {
     it = fs.getFeatures(q).features();
     int featureIndex = 0;
     while (it.hasNext()) {
       SimpleFeature feature = it.next();
       /* if offset is not supported and more features are returned, only keep
        * the features with index >= start */
       if (offsetSupported || featureIndex >= start) {
         JSONObject j =
             this.toJSONFeature(
                 new JSONObject(), feature, ft, al, propertyNames, attributeAliases, 0);
         features.put(j);
       }
       featureIndex++;
     }
   } finally {
     if (it != null) {
       it.close();
     }
     fs.getDataStore().dispose();
   }
   return features;
 }
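The interesting part above is the offset emulation: when the store cannot apply the start index itself, the query asks for (start + wanted) features and the loop skips the first start results. A condensed, illustrative sketch of just that pattern; the method and variable names are not from the original source:
 List<SimpleFeature> readPage(SimpleFeatureSource fs, Query q, int start, int max)
     throws IOException {
   boolean offsetSupported = fs.getQueryCapabilities().isOffsetSupported();
   if (offsetSupported) {
     q.setStartIndex(start);
     q.setMaxFeatures(max);
   } else {
     // emulate the offset: ask for more features and skip the first ones while iterating
     q.setMaxFeatures(start + max);
   }
   List<SimpleFeature> page = new ArrayList<SimpleFeature>();
   FeatureIterator<SimpleFeature> it = fs.getFeatures(q).features();
   try {
     int featureIndex = 0;
     while (it.hasNext() && page.size() < max) {
       SimpleFeature feature = it.next();
       if (offsetSupported || featureIndex >= start) {
         page.add(feature);
       }
       featureIndex++;
     }
   } finally {
     it.close();
   }
   return page;
 }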