/** Imports a geometry-less CSV file and verifies the resulting feature type. */
public void testImportCSV() throws Exception {
    File csvDir = unpack("csv/locations.zip");
    ImportContext context =
        importer.createContext(new SpatialFile(new File(csvDir, "locations.csv")));
    assertEquals(1, context.getTasks().size());

    ImportTask task = context.getTasks().get(0);
    assertEquals(ImportTask.State.NO_CRS, task.getState());

    // supply the missing CRS so the task can reach the READY state
    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    resource.setSRS("EPSG:4326");
    assertTrue("Item not ready", importer.prep(task));
    assertEquals(ImportTask.State.READY, task.getState());

    context.updated();
    assertEquals(ImportContext.State.PENDING, context.getState());
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());

    // no geometry transform was configured, so the imported type must have no geometry
    FeatureTypeInfo typeInfo = (FeatureTypeInfo) resource;
    SimpleFeatureType schema = (SimpleFeatureType) typeInfo.getFeatureType();
    GeometryDescriptor geometry = schema.getGeometryDescriptor();
    assertNull("Expecting no geometry", geometry);
    assertEquals(4, schema.getAttributeCount());
}
/**
 * Computes the native bounds of the named feature type from its feature source and
 * stores them both as native and lat/lon bounding box, then saves the catalog entry.
 */
public void setNativeBox(Catalog catalog, String name) throws Exception {
    FeatureTypeInfo typeInfo = catalog.getFeatureTypeByName(name);
    typeInfo.setNativeBoundingBox(typeInfo.getFeatureSource(null, null).getBounds());
    // assumes the native data is already expressed in WGS84 — TODO confirm upstream
    ReferencedEnvelope latLon =
        new ReferencedEnvelope(typeInfo.getNativeBoundingBox(), DefaultGeographicCRS.WGS84);
    typeInfo.setLatLonBoundingBox(latLon);
    catalog.save(typeInfo);
}
public void testImportKMLIndirect() throws Exception { File dir = unpack("kml/sample.zip"); String wsName = getCatalog().getDefaultWorkspace().getName(); DataStoreInfo h2DataStore = createH2DataStore(wsName, "kmltest"); SpatialFile importData = new SpatialFile(new File(dir, "sample.kml")); ImportContext context = importer.createContext(importData, h2DataStore); assertEquals(1, context.getTasks().size()); ImportTask task = context.getTasks().get(0); LayerInfo layer = task.getLayer(); ResourceInfo resource = layer.getResource(); assertEquals("Invalid srs", "EPSG:4326", resource.getSRS()); ReferencedEnvelope emptyBounds = new ReferencedEnvelope(); emptyBounds.setToNull(); assertTrue("Unexpected bounding box", emptyBounds.equals(resource.getNativeBoundingBox())); // transform chain to limit characters // otherwise we get a sql exception thrown TransformChain transformChain = task.getTransform(); transformChain.add(new DescriptionLimitingTransform()); importer.run(context); Exception error = task.getError(); if (error != null) { error.printStackTrace(); fail(error.getMessage()); } assertFalse("Bounding box not updated", emptyBounds.equals(resource.getNativeBoundingBox())); FeatureTypeInfo fti = (FeatureTypeInfo) resource; assertEquals("Invalid type name", "sample", fti.getName()); FeatureSource<? extends FeatureType, ? extends Feature> featureSource = fti.getFeatureSource(null, null); assertEquals("Unexpected feature count", 20, featureSource.getCount(Query.ALL)); }
public void testImportIntoDatabaseWithEncoding() throws Exception { Catalog cat = getCatalog(); DataStoreInfo ds = createH2DataStore(cat.getDefaultWorkspace().getName(), "ming"); File dir = tmpDir(); unpack("shape/ming_time.zip", dir); ImportContext context = importer.createContext(new Directory(dir), ds); assertEquals(1, context.getTasks().size()); context.getTasks().get(0).getData().setCharsetEncoding("UTF-8"); importer.run(context); FeatureTypeInfo info = (FeatureTypeInfo) context.getTasks().get(0).getLayer().getResource(); FeatureSource<? extends FeatureType, ? extends Feature> fs = info.getFeatureSource(null, null); FeatureCollection<? extends FeatureType, ? extends Feature> features = fs.getFeatures(); FeatureIterator<? extends Feature> it = features.features(); assertTrue(it.hasNext()); SimpleFeature next = (SimpleFeature) it.next(); // let's test some attributes to see if they were digested properly String type_ch = (String) next.getAttribute("type_ch"); assertEquals("卫", type_ch); String name_ch = (String) next.getAttribute("name_ch"); assertEquals("杭州前卫", name_ch); it.close(); }
/**
 * Builds the HTML GetFeatureInfo output format under test, with a template loader that
 * serves {@code currentTemplate} whenever the requested path contains "content" and an
 * empty template otherwise. Also primes the OWS dispatcher with KVP/ENV request values
 * used by templates, and assembles a fake query-layer list plus a GetFeatureInfo request.
 */
@Before
public void setUp() throws URISyntaxException, IOException {
    outputFormat = new HTMLFeatureInfoOutputFormat(getWMS());

    currentTemplate = "test_content.ftl";
    // configure template loader
    GeoServerTemplateLoader templateLoader =
        new GeoServerTemplateLoader(this.getClass(), getDataDirectory()) {

            @Override
            public Object findTemplateSource(String path) throws IOException {
                String templatePath;
                if (path.toLowerCase().contains("content")) {
                    // content lookups resolve to the currently selected test template
                    templatePath = currentTemplate;

                } else {
                    // header/footer and any other lookups get an empty template
                    templatePath = "empty.ftl";
                }
                try {
                    return new File(
                        this.getClass().getResource(templateFolder + templatePath).toURI());
                } catch (URISyntaxException e) {
                    // treat an unresolvable resource as "template not found"
                    return null;
                }
            }
        };

    outputFormat.templateLoader = templateLoader;

    // test request with some parameters to use in templates
    Request request = new Request();
    parameters = new HashMap<String, Object>();
    parameters.put("LAYER", "testLayer");
    Map<String, String> env = new HashMap<String, String>();
    env.put("TEST1", "VALUE1");
    env.put("TEST2", "VALUE2");
    parameters.put("ENV", env);
    request.setKvp(parameters);

    // make the request visible to template lookups through the dispatcher thread-local
    Dispatcher.REQUEST.set(request);
    final FeatureTypeInfo featureType = getFeatureTypeInfo(MockData.PRIMITIVEGEOFEATURE);

    fcType = WfsFactory.eINSTANCE.createFeatureCollectionType();
    fcType.getFeature().add(featureType.getFeatureSource(null, null).getFeatures());

    // fake layer list
    List<MapLayerInfo> queryLayers = new ArrayList<MapLayerInfo>();
    LayerInfo layerInfo = new LayerInfoImpl();
    layerInfo.setType(PublishedType.VECTOR);
    ResourceInfo resourceInfo = new FeatureTypeInfoImpl(null);
    NamespaceInfo nameSpace = new NamespaceInfoImpl();
    nameSpace.setPrefix("topp");
    nameSpace.setURI("http://www.topp.org");
    resourceInfo.setNamespace(nameSpace);
    layerInfo.setResource(resourceInfo);
    MapLayerInfo mapLayerInfo = new MapLayerInfo(layerInfo);
    queryLayers.add(mapLayerInfo);
    getFeatureInfoRequest = new GetFeatureInfoRequest();
    getFeatureInfoRequest.setQueryLayers(queryLayers);
}
/** Builds a {@link LayerReference} wrapping the feature source of the test vector layer. */
private LayerReference vectorLayer() throws IOException {
    FeatureTypeInfo typeInfo = getCatalog().getFeatureTypeByName(VECTOR_LAYER_NAME);
    FeatureSource<? extends FeatureType, ? extends Feature> source =
        typeInfo.getFeatureSource(null, null);
    return new LayerReference(VECTOR_LAYER_NAME, source);
}
/** Verifies FORCE_DECLARED policy: both schema and features report the declared CRS. */
@Test
public void testForce() throws Exception {
    FeatureTypeInfo typeInfo =
        getCatalog().getFeatureTypeByName(MockData.BASIC_POLYGONS.getLocalPart());
    assertEquals("EPSG:4269", typeInfo.getSRS());
    assertEquals(ProjectionPolicy.FORCE_DECLARED, typeInfo.getProjectionPolicy());

    FeatureCollection collection = typeInfo.getFeatureSource(null, null).getFeatures();
    assertEquals(CRS.decode("EPSG:4269"), collection.getSchema().getCoordinateReferenceSystem());

    FeatureIterator iterator = collection.features();
    Feature feature = iterator.next();
    iterator.close();
    // the declared CRS must be forced onto individual features as well
    assertEquals(CRS.decode("EPSG:4269"), feature.getType().getCoordinateReferenceSystem());
}
/**
 * Builds the shared monitor fixture: an in-memory monitor DAO preloaded with test data,
 * a {@link MonitorConfig} reporting FULL bbox mode, a mocked {@link GeoServer}, and a
 * catalog containing two feature types — "acme:foo" (EPSG:4326) and "acme:bar"
 * (EPSG:3348) — sharing one data store.
 */
@BeforeClass
public static void setUpData() throws Exception {
    MonitorDAO dao = new MemoryMonitorDAO();
    new MonitorTestData(dao).setup();

    MonitorConfig mc =
        new MonitorConfig() {

            @Override
            public MonitorDAO createDAO() {
                // the config builds its own DAO instance, seeded the same way as above
                MonitorDAO dao = new MemoryMonitorDAO();
                try {
                    new MonitorTestData(dao).setup();
                    return dao;
                } catch (java.text.ParseException e) {
                    // test-data dates are hard-coded, so this indicates a broken fixture
                    throw new RuntimeException(e);
                }
            }

            @Override
            public BboxMode getBboxMode() {
                return BboxMode.FULL;
            }
        };

    GeoServer gs = createMock(GeoServer.class);
    monitor = new Monitor(mc);
    monitor.setServer(gs);

    catalog = new CatalogImpl();
    // stub the mocked GeoServer to hand out the freshly built catalog
    expect(gs.getCatalog()).andStubReturn(catalog);
    replay(gs);

    NamespaceInfo ns = catalog.getFactory().createNamespace();
    ns.setPrefix("acme");
    ns.setURI("http://acme.org");
    catalog.add(ns);
    DataStoreInfo ds = catalog.getFactory().createDataStore();

    FeatureTypeInfo ftFoo = catalog.getFactory().createFeatureType();
    ftFoo.setName("foo");
    ftFoo.setSRS("EPSG:4326");
    ftFoo.setNamespace(ns);
    ftFoo.setStore(ds);
    catalog.add(ftFoo);

    FeatureTypeInfo ftBar = catalog.getFactory().createFeatureType();
    ftBar.setName("bar");
    ftBar.setSRS("EPSG:3348");
    ftBar.setNamespace(ns);
    ftBar.setStore(ds);
    catalog.add(ftBar);
}
/**
 * Imports a CSV through an H2 store with an {@link AttributesToPointGeometryTransform},
 * verifying that the LAT/LON columns are collapsed into a single point geometry attribute
 * and that data values survive the round trip.
 *
 * <p>Fix: the {@link FeatureIterator} is now closed in a finally block — previously a
 * failing assertion would leak the iterator and its underlying connection.
 */
public void testImportCSVIndirect() throws Exception {
    File dir = unpack("csv/locations.zip");
    String wsName = getCatalog().getDefaultWorkspace().getName();
    DataStoreInfo h2DataStore = createH2DataStore(wsName, "csvindirecttest");
    SpatialFile importData = new SpatialFile(new File(dir, "locations.csv"));

    ImportContext context = importer.createContext(importData, h2DataStore);
    assertEquals(1, context.getTasks().size());
    ImportTask task = context.getTasks().get(0);
    // turn the LAT/LON attribute pair into a point geometry column
    TransformChain transformChain = task.getTransform();
    transformChain.add(new AttributesToPointGeometryTransform("LAT", "LON"));
    assertEquals(ImportTask.State.NO_CRS, task.getState());

    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    resource.setSRS("EPSG:4326");
    assertTrue("Item not ready", importer.prep(task));
    assertEquals(ImportTask.State.READY, task.getState());

    context.updated();
    assertEquals(ImportContext.State.PENDING, context.getState());
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());

    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    SimpleFeatureType featureType = (SimpleFeatureType) fti.getFeatureType();
    GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
    assertNotNull("Expecting geometry", geometryDescriptor);
    assertEquals("Invalid geometry name", "location", geometryDescriptor.getLocalName());
    // LAT and LON are gone, replaced by the single geometry attribute
    assertEquals(3, featureType.getAttributeCount());

    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
        fti.getFeatureSource(null, null);
    FeatureCollection<? extends FeatureType, ? extends Feature> features =
        featureSource.getFeatures();
    assertEquals(9, features.size());

    FeatureIterator<? extends Feature> featureIterator = features.features();
    try {
        assertTrue("Expected features", featureIterator.hasNext());
        SimpleFeature feature = (SimpleFeature) featureIterator.next();
        assertNotNull(feature);
        assertEquals("Invalid city attribute", "Trento", feature.getAttribute("CITY"));
        assertEquals("Invalid number attribute", 140, feature.getAttribute("NUMBER"));
        Object geomAttribute = feature.getAttribute("location");
        assertNotNull("Expected geometry", geomAttribute);
        Point point = (Point) geomAttribute;
        Coordinate coordinate = point.getCoordinate();
        assertEquals("Invalid x coordinate", 11.12, coordinate.x, 0.1);
        assertEquals("Invalid y coordinate", 46.07, coordinate.y, 0.1);
    } finally {
        // always release the iterator, even when an assertion above fails
        featureIterator.close();
    }
}
/**
 * Verifies, through the secured catalog, that the given processing id is visible to the
 * current user: security filters hide disallowed rows, so a missing result row means
 * "not allowed".
 *
 * @param dataStore currently unused — kept for signature compatibility with callers
 * @param processing the {@code id_elaborazione} value to look up
 * @throws IOException if the feature source cannot be accessed
 * @throws ProcessException if the processing is filtered out (not allowed)
 */
private void checkProcessingAllowed(JDBCDataStore dataStore, int processing) throws IOException {
    // access through catalog to let the security jump in
    FeatureTypeInfo ft =
        catalog.getResourceByName("destination", "siig_mtd_d_elaborazione", FeatureTypeInfo.class);
    FeatureSource source = ft.getFeatureSource(null, null);
    FeatureCollection fc =
        source.getFeatures(ff2.equals(ff2.property("id_elaborazione"), ff2.literal(processing)));
    // check if the given processing is allowed (if not is filtered out from results
    // so size should be 0)
    if (fc.size() != 1) {
        throw new ProcessException("Operation not allowed");
    }
}
private void buildGlobalSiteMap(Element sitemapindex) { for (FeatureTypeInfo ft : getData().getFeatureTypes()) { try { if ((Boolean) ft.getMetadata().get("indexingEnabled")) { String ftSitemap = GEOSERVER_ROOT + "/layers/" + ft.getName() + "/sitemap.xml"; addSitemap(sitemapindex, ftSitemap); } } catch (Exception e) { // Do nothing ? } } }
/** Renaming a feature type and saving it must update name-based DAO lookups. */
@Test
public void testModifyFeatureType() throws Exception {
    testAddFeatureType();

    NamespaceInfo ns = dao.getNamespaceByPrefix("acme");
    FeatureTypeInfo featureType = dao.getResourceByName(ns, "anvil", FeatureTypeInfo.class);
    featureType.setName("dynamite");
    dao.save(featureType);

    // the old name no longer resolves, the new one does
    assertNull(dao.getResourceByName(ns, "anvil", FeatureTypeInfo.class));
    assertNotNull(dao.getResourceByName(ns, "dynamite", FeatureTypeInfo.class));
}
/**
 * Reports whether this handler supports the given catalog object: it must be a
 * {@link FeatureTypeInfo} whose store type matches one of the supported types.
 *
 * <p>Fix: the comparison is now supported-type-first, so a store with a null type no
 * longer triggers a NullPointerException (it simply doesn't match).
 *
 * @param obj candidate catalog object, may be any type
 * @return true if obj is a feature type backed by a supported store type
 */
@Override
public boolean canHandle(Object obj) {
    if (!(obj instanceof FeatureTypeInfo)) {
        return false;
    }
    String storeType = ((FeatureTypeInfo) obj).getStore().getType();
    for (String supported : getSupportedTypes()) {
        if (supported.equals(storeType)) {
            return true;
        }
    }
    return false;
}
@Test public void testWithRename() throws Exception { FeatureTypeInfo fti = getCatalog().getFeatureTypeByName("MyPoints"); assertEquals("EPSG:4326", fti.getSRS()); assertEquals(ProjectionPolicy.REPROJECT_TO_DECLARED, fti.getProjectionPolicy()); FeatureCollection fc = fti.getFeatureSource(null, null).getFeatures(); assertEquals(CRS.decode("EPSG:4326"), fc.getSchema().getCoordinateReferenceSystem()); FeatureIterator fi = fc.features(); Feature f = fi.next(); // test that geometry was reprojected Geometry g = (Geometry) f.getDefaultGeometryProperty().getValue(); assertFalse(g.equalsExact(WKT.read("POINT(500050 500050)"))); fi.close(); assertEquals(CRS.decode("EPSG:4326"), f.getType().getCoordinateReferenceSystem()); }
/**
 * Serializes basic resource information (name, workspace and optionally the schema)
 * into the given JSON object.
 *
 * @param json target JSON object, mutated in place
 * @param info the resource to describe
 * @param details when true, also emit the vector schema or the coverage grid schema
 * @return the same {@code json} instance, for chaining
 */
private JSONObj resource(JSONObj json, ResourceInfo info, boolean details) {
    json.put("name", info.getName()).put("workspace", info.getStore().getWorkspace().getName());
    if (details) {
        if (info instanceof FeatureTypeInfo) {
            FeatureTypeInfo data = (FeatureTypeInfo) info;
            try {
                IO.schema(json.putObject("schema"), data.getFeatureType(), false);
            } catch (IOException e) {
                // NOTE(review): a schema-load failure is silently swallowed, leaving the
                // "schema" object empty — consider at least logging it
            }
        } else if (info instanceof CoverageInfo) {
            CoverageInfo data = (CoverageInfo) info;
            IO.schemaGrid(json.putObject("schema"), data, false);
        }
    }
    return json;
}
@Test public void testLeaveNative() throws Exception { FeatureTypeInfo fti = getCatalog().getFeatureTypeByName(MockData.LINES.getLocalPart()); assertEquals("EPSG:3004", fti.getSRS()); assertEquals(ProjectionPolicy.NONE, fti.getProjectionPolicy()); FeatureCollection fc = fti.getFeatureSource(null, null).getFeatures(); assertEquals(CRS.decode("EPSG:32615"), fc.getSchema().getCoordinateReferenceSystem()); FeatureIterator fi = fc.features(); Feature f = fi.next(); // test that the geometry was left in tact Geometry g = (Geometry) f.getDefaultGeometryProperty().getValue(); assertTrue(g.equalsExact(WKT.read("LINESTRING(500125 500025,500175 500075)"))); fi.close(); assertEquals(CRS.decode("EPSG:32615"), f.getType().getCoordinateReferenceSystem()); }
/**
 * Runs an import with an {@link IntegerFieldToDateTransform} on CAT_ID and verifies the
 * column becomes a {@link Timestamp} whose year matches the original integer value.
 */
public void testIntegerToDateTransform() throws Exception {
    Catalog cat = getCatalog();

    File dir = unpack("shape/archsites_epsg_prj.zip");

    SpatialFile file = new SpatialFile(new File(dir, "archsites.shp"));
    file.prepare();

    ImportContext context = importer.createContext(file, store);
    assertEquals(1, context.getTasks().size());
    context.setTargetStore(store);

    ImportTask task = context.getTasks().get(0);
    // this is a silly test - CAT_ID ranges from 1-25 and is not supposed to be a date
    // java date handling doesn't like dates in year 1
    task.getTransform().add(new IntegerFieldToDateTransform("CAT_ID"));
    importer.run(context);

    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo ft = cat.getFeatureTypeByDataStore(store, "archsites");
    assertNotNull(ft);
    SimpleFeatureType schema = (SimpleFeatureType) ft.getFeatureType();
    assertEquals(Timestamp.class, schema.getDescriptor("CAT_ID").getType().getBinding());

    FeatureIterator it = ft.getFeatureSource(null, null).getFeatures().features();
    int year = 2;
    // compare in UTC so the asserted year does not depend on the local time zone
    Calendar cal = Calendar.getInstance();
    cal.setTimeZone(TimeZone.getTimeZone("UTC"));
    try {
        // make sure we have something
        assertTrue(it.hasNext());
        // the first date will be bogus due to java date limitation
        it.next();
        while (it.hasNext()) {
            SimpleFeature f = (SimpleFeature) it.next();
            // class will be timestamp
            cal.setTime((Date) f.getAttribute("CAT_ID"));
            assertEquals(year++, cal.get(Calendar.YEAR));
        }
    } finally {
        it.close();
    }
}
/**
 * Returns the appropriate icon for the specified layer. This one distinguishes the
 * geometry type inside vector layers.
 *
 * @param info the layer to pick an icon for
 * @return the matching icon resource reference
 */
public ResourceReference getSpecificLayerIcon(LayerInfo info) {
    if (info.getType() == Type.RASTER) {
        return RASTER_ICON;
    }
    if (info.getType() == Type.WMS) {
        return MAP_ICON;
    }
    if (info.getType() != Type.VECTOR) {
        return UNKNOWN_ICON;
    }
    try {
        FeatureTypeInfo typeInfo = (FeatureTypeInfo) info.getResource();
        GeometryDescriptor descriptor = typeInfo.getFeatureType().getGeometryDescriptor();
        return getVectoryIcon(descriptor);
    } catch (Exception e) {
        // could not determine the geometry type, fall back to the generic geometry icon
        return GEOMETRY_ICON;
    }
}
/** * Returns the collection of all values of the dimension attribute, eventually sorted if the * native capabilities allow for it * * @param typeInfo * @param dimension * @return * @throws IOException */ FeatureCollection getDimensionCollection(FeatureTypeInfo typeInfo, DimensionInfo dimension) throws IOException { // grab the feature source FeatureSource source = null; try { source = typeInfo.getFeatureSource(null, GeoTools.getDefaultHints()); } catch (IOException e) { throw new ServiceException( "Could not get the feauture source to list time info for layer " + typeInfo.getPrefixedName(), e); } // build query to grab the dimension values final Query dimQuery = new Query(source.getSchema().getName().getLocalPart()); dimQuery.setPropertyNames(Arrays.asList(dimension.getAttribute())); return source.getFeatures(dimQuery); }
/**
 * Returns the list of elevation values for the specified typeInfo based on the dimension
 * representation: all values for {@link DimensionPresentation#LIST} (or discrete interval
 * without a resolution), otherwise min and max.
 *
 * @param typeInfo the feature type whose elevation dimension is queried
 * @return the sorted elevations, or null when the layer has no elevation values at all
 * @throws IOException if the dimension values cannot be read
 * @throws ServiceException if the layer has no enabled elevation dimension
 */
public TreeSet<Double> getFeatureTypeElevations(FeatureTypeInfo typeInfo) throws IOException {
    // grab the time metadata
    DimensionInfo elevation =
        typeInfo.getMetadata().get(ResourceInfo.ELEVATION, DimensionInfo.class);
    if (elevation == null || !elevation.isEnabled()) {
        throw new ServiceException(
            "Layer " + typeInfo.getPrefixedName() + " does not have elevation support enabled");
    }

    FeatureCollection collection = getDimensionCollection(typeInfo, elevation);

    TreeSet<Double> result = new TreeSet<Double>();
    if (elevation.getPresentation() == DimensionPresentation.LIST
        || (elevation.getPresentation() == DimensionPresentation.DISCRETE_INTERVAL
            && elevation.getResolution() == null)) {
        // enumerate every distinct elevation value
        final UniqueVisitor visitor = new UniqueVisitor(elevation.getAttribute());
        collection.accepts(visitor, null);
        @SuppressWarnings("unchecked")
        Set<Object> values = visitor.getUnique();
        if (values.size() <= 0) {
            // NOTE(review): an empty layer yields a null return, not an empty set —
            // callers must handle null
            result = null;
        } else {
            for (Object value : values) {
                result.add(((Number) value).doubleValue());
            }
        }
    } else {
        // only min and max are needed for interval-style presentations
        final MinVisitor min = new MinVisitor(elevation.getAttribute());
        collection.accepts(min, null);
        // check calcresult first to avoid potential IllegalStateException if no features are in
        // collection
        CalcResult calcResult = min.getResult();
        if (calcResult != CalcResult.NULL_RESULT) {
            result.add(((Number) min.getMin()).doubleValue());
            final MaxVisitor max = new MaxVisitor(elevation.getAttribute());
            collection.accepts(max, null);
            result.add(((Number) max.getMax()).doubleValue());
        }
    }
    return result;
}
/** Adding a feature type to the DAO makes it resolvable by namespace + name. */
@Test
public void testAddFeatureType() throws Exception {
    testAddDataStore();
    testAddNamespace();

    DataStoreInfo ds =
        dao.getStoreByName(dao.getWorkspaceByName("acme"), "widgets", DataStoreInfo.class);
    NamespaceInfo ns = dao.getNamespaceByPrefix("acme");

    // not there before the add
    assertNull(dao.getResourceByName(ns, "anvil", FeatureTypeInfo.class));

    FeatureTypeInfo featureType = dao.getCatalog().getFactory().createFeatureType();
    featureType.setName("anvil");
    featureType.setNativeName("anvil");
    featureType.setStore(ds);
    featureType.setNamespace(ns);
    dao.add(featureType);

    // resolvable after the add
    assertNotNull(dao.getResourceByName(ns, "anvil", FeatureTypeInfo.class));
}
/**
 * Returns the list of time values for the specified typeInfo based on the dimension
 * representation: all values for {@link DimensionPresentation#LIST}, otherwise min and max.
 *
 * @param typeInfo the feature type whose time dimension is queried
 * @return the sorted times, or null when the layer has no time values at all
 * @throws IOException if the dimension values cannot be read
 * @throws ServiceException if the layer has no enabled time dimension
 */
public TreeSet<Date> getFeatureTypeTimes(FeatureTypeInfo typeInfo) throws IOException {
    // grab the time metadata
    DimensionInfo time = typeInfo.getMetadata().get(ResourceInfo.TIME, DimensionInfo.class);
    if (time == null || !time.isEnabled()) {
        throw new ServiceException(
            "Layer " + typeInfo.getPrefixedName() + " does not have time support enabled");
    }

    FeatureCollection collection = getDimensionCollection(typeInfo, time);

    TreeSet<Date> result = new TreeSet<Date>();
    if (time.getPresentation() == DimensionPresentation.LIST) {
        // enumerate every distinct time value
        final UniqueVisitor visitor = new UniqueVisitor(time.getAttribute());
        collection.accepts(visitor, null);
        @SuppressWarnings("unchecked")
        Set<Date> values = visitor.getUnique();
        if (values.size() <= 0) {
            // NOTE(review): an empty layer yields a null return, not an empty set —
            // callers must handle null
            result = null;
        } else {
            // we might get null values out of the visitor, strip them
            values.remove(null);
            result.addAll(values);
        }
    } else {
        // only min and max are needed for interval-style presentations
        final MinVisitor min = new MinVisitor(time.getAttribute());
        collection.accepts(min, null);
        CalcResult minResult = min.getResult();
        // check calcresult first to avoid potential IllegalStateException if no features are in
        // collection
        if (minResult != CalcResult.NULL_RESULT) {
            result.add((Date) min.getMin());
            final MaxVisitor max = new MaxVisitor(time.getAttribute());
            collection.accepts(max, null);
            result.add((Date) max.getMax());
        }
    }
    return result;
}
/**
 * Builds a filter matching the given time and elevation values, should the layer support
 * these dimensions. Null entries in either list are replaced by the dimension's default
 * value. Only one among time and elevation can be multi-valued.
 *
 * @param times requested times; null entries mean "use the default time"; ignored if null
 * @param elevations requested elevations; null entries mean "use the default elevation";
 *     ignored if null
 * @param typeInfo the feature type whose dimension metadata drives the filter
 * @return the combined dimension filter built by {@link DimensionFilterBuilder}
 * @throws IOException if a default dimension value cannot be computed
 */
public Filter getTimeElevationToFilter(
    List<Object> times, List<Object> elevations, FeatureTypeInfo typeInfo) throws IOException {
    DimensionInfo timeInfo = typeInfo.getMetadata().get(ResourceInfo.TIME, DimensionInfo.class);
    DimensionInfo elevationInfo =
        typeInfo.getMetadata().get(ResourceInfo.ELEVATION, DimensionInfo.class);

    DimensionFilterBuilder builder = new DimensionFilterBuilder(ff);

    // handle time support
    if (timeInfo != null && timeInfo.isEnabled() && times != null) {
        List<Object> defaultedTimes = new ArrayList<Object>(times.size());
        for (Object datetime : times) {
            if (datetime == null) {
                // this is "default"
                datetime = getDefaultTime(typeInfo);
            }
            defaultedTimes.add(datetime);
        }

        builder.appendFilters(timeInfo.getAttribute(), timeInfo.getEndAttribute(), defaultedTimes);
    }

    // handle elevation support
    if (elevationInfo != null && elevationInfo.isEnabled() && elevations != null) {
        List<Object> defaultedElevations = new ArrayList<Object>(elevations.size());
        for (Object elevation : elevations) {
            if (elevation == null) {
                // this is "default"
                elevation = getDefaultElevation(typeInfo);
            }
            defaultedElevations.add(elevation);
        }
        builder.appendFilters(
            elevationInfo.getAttribute(), elevationInfo.getEndAttribute(), defaultedElevations);
    }

    Filter result = builder.getFilter();
    return result;
}
/** Runs an import with a DateFormatTransform and checks the column became a Date. */
public void testDateFormatTransform() throws Exception {
    Catalog catalog = getCatalog();

    File dir = unpack("shape/ivan.zip");
    SpatialFile shapefile = new SpatialFile(new File(dir, "ivan.shp"));
    shapefile.prepare();

    ImportContext context = importer.createContext(shapefile, store);
    assertEquals(1, context.getTasks().size());
    context.setTargetStore(store);

    // parse the "timestamp" strings with the given pattern during import
    ImportTask task = context.getTasks().get(0);
    task.getTransform().add(new DateFormatTransform("timestamp", "yyyy-MM-dd HH:mm:ss.S"));
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());

    FeatureTypeInfo typeInfo = catalog.getFeatureTypeByDataStore(store, "ivan");
    assertNotNull(typeInfo);
    SimpleFeatureType schema = (SimpleFeatureType) typeInfo.getFeatureType();
    assertTrue(
        Date.class.isAssignableFrom(schema.getDescriptor("timestamp").getType().getBinding()));

    FeatureIterator iterator = typeInfo.getFeatureSource(null, null).getFeatures().features();
    try {
        assertTrue(iterator.hasNext());
        while (iterator.hasNext()) {
            SimpleFeature feature = (SimpleFeature) iterator.next();
            assertTrue(feature.getAttribute("timestamp") instanceof Date);
        }
    } finally {
        iterator.close();
    }
}
/**
 * If this layers has been setup to reproject data, skipReproject = true will disable
 * reprojection. This method is build especially for the rendering subsystem that should be able
 * to perform a full reprojection on its own, and do generalization before reprojection (thus
 * avoid to reproject all of the original coordinates)
 *
 * @param skipReproject when true, asks the resource pool not to reproject
 * @return the feature source backing this vector layer
 * @throws IllegalArgumentException if this layer is not a vector layer
 * @throws IOException if the layer, its resource, or its store is disabled/misconfigured
 */
public FeatureSource<? extends FeatureType, ? extends Feature> getFeatureSource(
    boolean skipReproject) throws IOException {
    if (type != TYPE_VECTOR) {
        throw new IllegalArgumentException("Layer type is not vector");
    }
    // ask for enabled() instead of isEnabled() to account for disabled resource/store
    if (!layerInfo.enabled()) {
        // NOTE(review): same message as the store check below even though the cause here is
        // a disabled layer — consider differentiating the two messages
        throw new IOException(
            "featureType: " + getName() + " does not have a properly configured " + "datastore");
    }
    FeatureTypeInfo resource = (FeatureTypeInfo) layerInfo.getResource();
    if (resource.getStore() == null || resource.getStore().getDataStore(null) == null) {
        throw new IOException(
            "featureType: " + getName() + " does not have a properly configured " + "datastore");
    }
    // the REPROJECT hint toggles reprojection inside the resource pool
    Hints hints = new Hints(ResourcePool.REPROJECT, Boolean.valueOf(!skipReproject));
    return resource.getFeatureSource(null, hints);
}
/** Runs an import with a NumberFormatTransform and checks "cat" became an Integer. */
public void testNumberFormatTransform() throws Exception {
    Catalog catalog = getCatalog();

    File dir = unpack("shape/restricted.zip");
    SpatialFile shapefile = new SpatialFile(new File(dir, "restricted.shp"));
    shapefile.prepare();

    ImportContext context = importer.createContext(shapefile, store);
    assertEquals(1, context.getTasks().size());
    context.setTargetStore(store);

    // convert the "cat" column to Integer during the import
    ImportTask task = context.getTasks().get(0);
    task.getTransform().add(new NumberFormatTransform("cat", Integer.class));
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());

    FeatureTypeInfo typeInfo = catalog.getFeatureTypeByDataStore(store, "restricted");
    assertNotNull(typeInfo);
    SimpleFeatureType schema = (SimpleFeatureType) typeInfo.getFeatureType();
    assertEquals(Integer.class, schema.getDescriptor("cat").getType().getBinding());

    FeatureIterator iterator = typeInfo.getFeatureSource(null, null).getFeatures().features();
    try {
        assertTrue(iterator.hasNext());
        while (iterator.hasNext()) {
            SimpleFeature feature = (SimpleFeature) iterator.next();
            assertTrue(feature.getAttribute("cat") instanceof Integer);
        }
    } finally {
        iterator.close();
    }
}
/**
 * Reads the catalog from disk.
 *
 * <p>Loads, in order: global styles; workspaces (each with its namespace, per-workspace
 * styles and layer groups); then each workspace's stores — data stores with their feature
 * types, coverage stores with their coverages, WMS stores with their cascaded layers —
 * and finally the global layer groups. Individual load failures are logged and skipped so
 * one broken configuration file does not prevent the rest of the catalog from loading.
 */
Catalog readCatalog(XStreamPersister xp) throws Exception {
    CatalogImpl catalog = new CatalogImpl();
    catalog.setResourceLoader(resourceLoader);
    xp.setCatalog(catalog);
    xp.setUnwrapNulls(false);

    CatalogFactory factory = catalog.getFactory(); // NOTE(review): unused local

    // global styles
    loadStyles(resourceLoader.find("styles"), catalog, xp);

    // workspaces, stores, and resources
    File workspaces = resourceLoader.find("workspaces");
    if (workspaces != null) {
        // do a first quick scan over all workspaces, setting the default
        File dws = new File(workspaces, "default.xml");
        WorkspaceInfo defaultWorkspace = null;
        if (dws.exists()) {
            try {
                defaultWorkspace = depersist(xp, dws, WorkspaceInfo.class);
                LOGGER.info("Loaded default workspace " + defaultWorkspace.getName());
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "Failed to load default workspace", e);
            }
        } else {
            LOGGER.warning("No default workspace was found.");
        }

        for (File wsd : list(workspaces, DirectoryFileFilter.INSTANCE)) {
            File f = new File(wsd, "workspace.xml");
            if (!f.exists()) {
                continue;
            }

            WorkspaceInfo ws = null;
            try {
                ws = depersist(xp, f, WorkspaceInfo.class);
                catalog.add(ws);
            } catch (Exception e) {
                LOGGER.log(Level.WARNING, "Failed to load workspace '" + wsd.getName() + "'", e);
                continue;
            }

            LOGGER.info("Loaded workspace '" + ws.getName() + "'");

            // load the namespace
            File nsf = new File(wsd, "namespace.xml");
            NamespaceInfo ns = null;
            if (nsf.exists()) {
                try {
                    ns = depersist(xp, nsf, NamespaceInfo.class);
                    catalog.add(ns);
                } catch (Exception e) {
                    LOGGER.log(
                        Level.WARNING, "Failed to load namespace for '" + wsd.getName() + "'", e);
                }
            }

            // set the default workspace, this value might be null in the case of coming from a
            // 2.0.0 data directory. See http://jira.codehaus.org/browse/GEOS-3440
            if (defaultWorkspace != null) {
                if (ws.getName().equals(defaultWorkspace.getName())) {
                    catalog.setDefaultWorkspace(ws);
                    if (ns != null) {
                        catalog.setDefaultNamespace(ns);
                    }
                }
            } else {
                // create the default.xml file
                defaultWorkspace = catalog.getDefaultWorkspace();
                if (defaultWorkspace != null) {
                    try {
                        persist(xp, defaultWorkspace, dws);
                    } catch (Exception e) {
                        LOGGER.log(
                            Level.WARNING,
                            "Failed to persist default workspace '" + wsd.getName() + "'",
                            e);
                    }
                }
            }

            // load the styles for the workspace
            File styles = resourceLoader.find(wsd, "styles");
            if (styles != null) {
                loadStyles(styles, catalog, xp);
            }
        }

        for (File wsd : list(workspaces, DirectoryFileFilter.INSTANCE)) {
            // load the stores for this workspace
            for (File sd : list(wsd, DirectoryFileFilter.INSTANCE)) {
                File f = new File(sd, "datastore.xml");
                if (f.exists()) {
                    // load as a datastore
                    DataStoreInfo ds = null;
                    try {
                        ds = depersist(xp, f, DataStoreInfo.class);
                        catalog.add(ds);

                        LOGGER.info("Loaded data store '" + ds.getName() + "'");

                        if (ds.isEnabled()) {
                            // connect to the datastore to determine if we should disable it
                            try {
                                ds.getDataStore(null);
                            } catch (Throwable t) {
                                LOGGER.warning(
                                    "Error connecting to '" + ds.getName() + "'. Disabling.");
                                LOGGER.log(Level.INFO, "", t);

                                ds.setError(t);
                                ds.setEnabled(false);
                            }
                        }
                    } catch (Exception e) {
                        LOGGER.log(
                            Level.WARNING, "Failed to load data store '" + sd.getName() + "'", e);
                        continue;
                    }

                    // load feature types
                    for (File ftd : list(sd, DirectoryFileFilter.INSTANCE)) {
                        f = new File(ftd, "featuretype.xml");
                        if (f.exists()) {
                            FeatureTypeInfo ft = null;
                            try {
                                ft = depersist(xp, f, FeatureTypeInfo.class);
                            } catch (Exception e) {
                                LOGGER.log(
                                    Level.WARNING,
                                    "Failed to load feature type '" + ftd.getName() + "'",
                                    e);
                                continue;
                            }

                            catalog.add(ft);

                            // NOTE(review): logs the store name, not the feature type name
                            LOGGER.info("Loaded feature type '" + ds.getName() + "'");

                            f = new File(ftd, "layer.xml");
                            if (f.exists()) {
                                try {
                                    LayerInfo l = depersist(xp, f, LayerInfo.class);
                                    catalog.add(l);

                                    LOGGER.info("Loaded layer '" + l.getName() + "'");
                                } catch (Exception e) {
                                    LOGGER.log(
                                        Level.WARNING,
                                        "Failed to load layer for feature type '"
                                            + ft.getName()
                                            + "'",
                                        e);
                                }
                            }
                        } else {
                            LOGGER.warning(
                                "Ignoring feature type directory " + ftd.getAbsolutePath());
                        }
                    }
                } else {
                    // look for a coverage store
                    f = new File(sd, "coveragestore.xml");
                    if (f.exists()) {
                        CoverageStoreInfo cs = null;
                        try {
                            cs = depersist(xp, f, CoverageStoreInfo.class);
                            catalog.add(cs);

                            LOGGER.info("Loaded coverage store '" + cs.getName() + "'");
                        } catch (Exception e) {
                            LOGGER.log(
                                Level.WARNING,
                                "Failed to load coverage store '" + sd.getName() + "'",
                                e);
                            continue;
                        }

                        // load coverages
                        for (File cd : list(sd, DirectoryFileFilter.INSTANCE)) {
                            f = new File(cd, "coverage.xml");
                            if (f.exists()) {
                                CoverageInfo c = null;
                                try {
                                    c = depersist(xp, f, CoverageInfo.class);
                                    catalog.add(c);

                                    // NOTE(review): logs the store name, not the coverage name
                                    LOGGER.info("Loaded coverage '" + cs.getName() + "'");
                                } catch (Exception e) {
                                    LOGGER.log(
                                        Level.WARNING,
                                        "Failed to load coverage '" + cd.getName() + "'",
                                        e);
                                    continue;
                                }

                                f = new File(cd, "layer.xml");
                                if (f.exists()) {
                                    try {
                                        LayerInfo l = depersist(xp, f, LayerInfo.class);
                                        catalog.add(l);

                                        LOGGER.info("Loaded layer '" + l.getName() + "'");
                                    } catch (Exception e) {
                                        LOGGER.log(
                                            Level.WARNING,
                                            "Failed to load layer coverage '"
                                                + c.getName()
                                                + "'",
                                            e);
                                    }
                                }
                            } else {
                                LOGGER.warning(
                                    "Ignoring coverage directory " + cd.getAbsolutePath());
                            }
                        }
                    } else {
                        f = new File(sd, "wmsstore.xml");
                        if (f.exists()) {
                            WMSStoreInfo wms = null;
                            try {
                                wms = depersist(xp, f, WMSStoreInfo.class);
                                catalog.add(wms);

                                LOGGER.info("Loaded wmsstore '" + wms.getName() + "'");
                            } catch (Exception e) {
                                LOGGER.log(
                                    Level.WARNING,
                                    "Failed to load wms store '" + sd.getName() + "'",
                                    e);
                                continue;
                            }

                            // load wms layers
                            for (File cd : list(sd, DirectoryFileFilter.INSTANCE)) {
                                f = new File(cd, "wmslayer.xml");
                                if (f.exists()) {
                                    WMSLayerInfo wl = null;
                                    try {
                                        wl = depersist(xp, f, WMSLayerInfo.class);
                                        catalog.add(wl);

                                        LOGGER.info("Loaded wms layer'" + wl.getName() + "'");
                                    } catch (Exception e) {
                                        LOGGER.log(
                                            Level.WARNING,
                                            "Failed to load wms layer '" + cd.getName() + "'",
                                            e);
                                        continue;
                                    }

                                    f = new File(cd, "layer.xml");
                                    if (f.exists()) {
                                        try {
                                            LayerInfo l = depersist(xp, f, LayerInfo.class);
                                            catalog.add(l);

                                            LOGGER.info("Loaded layer '" + l.getName() + "'");
                                        } catch (Exception e) {
                                            LOGGER.log(
                                                Level.WARNING,
                                                "Failed to load cascaded wms layer '"
                                                    + wl.getName()
                                                    + "'",
                                                e);
                                        }
                                    }
                                } else {
                                    LOGGER.warning(
                                        "Ignoring coverage directory " + cd.getAbsolutePath());
                                }
                            }
                        } else if (!isConfigDirectory(sd)) {
                            LOGGER.warning("Ignoring store directory '" + sd.getName() + "'");
                            continue;
                        }
                    }
                }
            }

            // load hte layer groups for this workspace
            File layergroups = resourceLoader.find(wsd, "layergroups");
            if (layergroups != null) {
                loadLayerGroups(layergroups, catalog, xp);
            }
        }
    } else {
        LOGGER.warning("No 'workspaces' directory found, unable to load any stores.");
    }

    // namespaces

    // layergroups
    File layergroups = resourceLoader.find("layergroups");
    if (layergroups != null) {
        loadLayerGroups(layergroups, catalog, xp);
    }

    xp.setUnwrapNulls(true);
    catalog.resolve();
    return catalog;
}
public MockCatalogBuilder featureType( final String name, String srs, ProjectionPolicy projPolicy, ReferencedEnvelope envelope, ReferencedEnvelope latLonEnvelope) { String ftId = newId(); final DataStoreInfo ds = dataStores.peekLast(); NamespaceInfo ns = namespaces.peekLast(); final FeatureTypeInfo ft = createNiceMock(FeatureTypeInfo.class); featureTypes.add(ft); initResource( ft, FeatureTypeInfo.class, ftId, name, ds, ns, srs, projPolicy, envelope, latLonEnvelope); expect(ft.getNumDecimals()).andReturn(8); // setup the property file data File propDir = new File(dataDirRoot, ds.getName()); propDir.mkdirs(); String fileName = name + ".properties"; try { IOUtils.copy(getClass().getResourceAsStream(fileName), new File(propDir, fileName)); } catch (IOException e) { throw new RuntimeException(e); } try { expect(ft.getFeatureType()) .andAnswer( new IAnswer<FeatureType>() { @Override public FeatureType answer() throws Throwable { return ((DataStore) ds.getDataStore(null)).getSchema(name); } }) .anyTimes(); expect(ft.getFeatureSource(null, null)) .andAnswer( (IAnswer) new IAnswer<FeatureSource>() { @Override public FeatureSource answer() throws Throwable { return ((DataStore) ds.getDataStore(null)).getFeatureSource(name); } }) .anyTimes(); } catch (IOException e) { } expect(catalog.getFeatureTypeByName(or(eq(name), eq(ns.getPrefix() + ":" + name)))) .andReturn(ft) .anyTimes(); expect( catalog.getFeatureTypeByName( or(eq(new NameImpl(ns.getPrefix(), name)), eq(new NameImpl(ns.getURI(), name))))) .andReturn(ft) .anyTimes(); expect(catalog.getFeatureTypeByName(ns, name)).andReturn(ft).anyTimes(); expect(catalog.getFeatureTypeByName(ns.getPrefix(), name)).andReturn(ft).anyTimes(); // expect(catalog.getFeatureTypeByName(or(eq(ns.getPrefix()), eq(ns.getURI())), name)) // .andReturn(ft).anyTimes(); expect(catalog.getFeatureTypeByStore(ds, name)).andReturn(ft).anyTimes(); expect(catalog.getFeatureTypeByDataStore(ds, name)).andReturn(ft).anyTimes(); ft.accept((CatalogVisitor) 
anyObject()); expectLastCall() .andAnswer( new VisitAnswer() { @Override protected void doVisit(CatalogVisitor visitor) { visitor.visit(ft); } }) .anyTimes(); callback.onResource(name, ft, ds, this); replay(ft, createLayer(ft, name, ns)); return this; }
public ValueCollectionType run(GetPropertyValueType request) throws WFSException { // check the request resolve if (request.isSetResolve() && !ResolveValueType.NONE.equals(request.getResolve())) { throw new WFSException(request, "Only resolve = none is supported", "InvalidParameterValue") .locator("resolve"); } if (request.getValueReference() == null) { throw new WFSException(request, "No valueReference specified", "MissingParameterValue") .locator("valueReference"); } // do a getFeature request GetFeatureType getFeature = Wfs20Factory.eINSTANCE.createGetFeatureType(); getFeature.getAbstractQueryExpression().add(request.getAbstractQueryExpression()); FeatureCollectionType fc = (FeatureCollectionType) delegate.run(GetFeatureRequest.adapt(getFeature)).getAdaptee(); QueryType query = (QueryType) request.getAbstractQueryExpression(); QName typeName = (QName) query.getTypeNames().iterator().next(); FeatureTypeInfo featureType = catalog.getFeatureTypeByName(typeName.getNamespaceURI(), typeName.getLocalPart()); try { // look for the attribute type AttributeTypeInfo attribute = null; for (AttributeTypeInfo at : featureType.attributes()) { if (at.getName().equals(request.getValueReference())) { attribute = at; break; } } if (attribute == null) { throw new WFSException(request, "No such attribute: " + request.getValueReference()); } AttributeDescriptor descriptor = attribute.getAttribute(); if (descriptor == null) { PropertyDescriptor pd = featureType.getFeatureType().getDescriptor(attribute.getName()); if (pd instanceof AttributeDescriptor) { descriptor = (AttributeDescriptor) pd; } } if (descriptor == null) { throw new WFSException(request, "Unable to obtain descriptor for " + attribute.getName()); } // create value collection type from feature collection ValueCollectionType vc = Wfs20Factory.eINSTANCE.createValueCollectionType(); vc.setTimeStamp(fc.getTimeStamp()); vc.setNumberMatched(fc.getNumberMatched()); vc.setNumberReturned(fc.getNumberReturned()); 
vc.getMember().add(new PropertyValueCollection(fc.getMember().iterator().next(), descriptor)); // TODO: next/previous but point back at GetPropertyValue // vc.setNext(fc.getNext()); // vc.setPrevious(fc.getPrevious()); return vc; } catch (IOException e) { throw new WFSException(request, e); } }
public void write(Object value, OutputStream output, Operation operation) throws IOException, ServiceException { final FeatureDiffReader[] diffReaders = (FeatureDiffReader[]) value; // create a new feature collcetion type with just the numbers VersioningTransactionConverter converter = new VersioningTransactionConverter(); final TransactionType transaction = converter.convert(diffReaders, TransactionType.class); // declare wfs schema location BaseRequestType gft = (BaseRequestType) operation.getParameters()[0]; Encoder encoder = new Encoder(configuration, configuration.schema()); encodeWfsSchemaLocation(encoder, gft.getBaseUrl()); encoder.setIndenting(true); encoder.setEncoding(Charset.forName(geoServer.getSettings().getCharset())); // set up schema locations // round up the info objects for each feature collection HashMap /* <String,Set> */ ns2metas = new HashMap(); for (int i = 0; i < diffReaders.length; i++) { final FeatureDiffReader diffReader = diffReaders[i]; final SimpleFeatureType featureType = diffReader.getSchema(); // load the metadata for the feature type String namespaceURI = featureType.getName().getNamespaceURI(); FeatureTypeInfo meta = geoServer .getCatalog() .getFeatureTypeByName(namespaceURI, featureType.getName().getLocalPart()); // add it to the map Set metas = (Set) ns2metas.get(namespaceURI); if (metas == null) { metas = new HashSet(); ns2metas.put(namespaceURI, metas); } metas.add(meta); } // declare application schema namespaces for (Iterator i = ns2metas.entrySet().iterator(); i.hasNext(); ) { Map.Entry entry = (Map.Entry) i.next(); String namespaceURI = (String) entry.getKey(); Set metas = (Set) entry.getValue(); StringBuffer typeNames = new StringBuffer(); for (Iterator m = metas.iterator(); m.hasNext(); ) { FeatureTypeInfo meta = (FeatureTypeInfo) m.next(); typeNames.append(meta.getName()); if (m.hasNext()) { typeNames.append(","); } } // set the schema location encodeTypeSchemaLocation(encoder, gft.getBaseUrl(), namespaceURI, 
typeNames); } try { encoder.encode(transaction, element, output); } finally { for (int i = 0; i < diffReaders.length; i++) { diffReaders[i].close(); } } }