// Imports a KML file indirectly (through an H2 target store) and verifies the
// declared CRS, the bounding-box computation performed by the import, and the
// resulting feature count.
public void testImportKMLIndirect() throws Exception {
    File dir = unpack("kml/sample.zip");
    String wsName = getCatalog().getDefaultWorkspace().getName();
    DataStoreInfo h2DataStore = createH2DataStore(wsName, "kmltest");
    SpatialFile importData = new SpatialFile(new File(dir, "sample.kml"));
    ImportContext context = importer.createContext(importData, h2DataStore);
    assertEquals(1, context.getTasks().size());
    ImportTask task = context.getTasks().get(0);
    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    assertEquals("Invalid srs", "EPSG:4326", resource.getSRS());
    // before the import runs, the native bounding box is expected to be the null envelope
    ReferencedEnvelope emptyBounds = new ReferencedEnvelope();
    emptyBounds.setToNull();
    assertTrue("Unexpected bounding box", emptyBounds.equals(resource.getNativeBoundingBox()));
    // transform chain to limit characters
    // otherwise we get a sql exception thrown
    TransformChain transformChain = task.getTransform();
    transformChain.add(new DescriptionLimitingTransform());
    importer.run(context);
    Exception error = task.getError();
    if (error != null) {
        error.printStackTrace();
        fail(error.getMessage());
    }
    // after the import the native bounds must have been computed and stored
    assertFalse("Bounding box not updated", emptyBounds.equals(resource.getNativeBoundingBox()));
    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    assertEquals("Invalid type name", "sample", fti.getName());
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
            fti.getFeatureSource(null, null);
    assertEquals("Unexpected feature count", 20, featureSource.getCount(Query.ALL));
}
public void testImportIntoDatabaseWithEncoding() throws Exception { Catalog cat = getCatalog(); DataStoreInfo ds = createH2DataStore(cat.getDefaultWorkspace().getName(), "ming"); File dir = tmpDir(); unpack("shape/ming_time.zip", dir); ImportContext context = importer.createContext(new Directory(dir), ds); assertEquals(1, context.getTasks().size()); context.getTasks().get(0).getData().setCharsetEncoding("UTF-8"); importer.run(context); FeatureTypeInfo info = (FeatureTypeInfo) context.getTasks().get(0).getLayer().getResource(); FeatureSource<? extends FeatureType, ? extends Feature> fs = info.getFeatureSource(null, null); FeatureCollection<? extends FeatureType, ? extends Feature> features = fs.getFeatures(); FeatureIterator<? extends Feature> it = features.features(); assertTrue(it.hasNext()); SimpleFeature next = (SimpleFeature) it.next(); // let's test some attributes to see if they were digested properly String type_ch = (String) next.getAttribute("type_ch"); assertEquals("卫", type_ch); String name_ch = (String) next.getAttribute("name_ch"); assertEquals("杭州前卫", name_ch); it.close(); }
// Refreshes the native bounding box of the named feature type from its data
// source, derives the lat/lon box by re-tagging those bounds as WGS84, and
// persists the updated info back to the catalog.
public void setNativeBox(Catalog catalog, String name) throws Exception {
    FeatureTypeInfo info = catalog.getFeatureTypeByName(name);
    ReferencedEnvelope nativeBounds = info.getFeatureSource(null, null).getBounds();
    info.setNativeBoundingBox(nativeBounds);
    ReferencedEnvelope latLonBounds =
            new ReferencedEnvelope(info.getNativeBoundingBox(), DefaultGeographicCRS.WGS84);
    info.setLatLonBoundingBox(latLonBounds);
    catalog.save(info);
}
@Before
public void setUp() throws URISyntaxException, IOException {
    outputFormat = new HTMLFeatureInfoOutputFormat(getWMS());
    currentTemplate = "test_content.ftl";
    // configure template loader
    // "content" lookups resolve to the currently selected test template; all other
    // templates (header/footer) resolve to an empty template
    GeoServerTemplateLoader templateLoader =
            new GeoServerTemplateLoader(this.getClass(), getDataDirectory()) {
                @Override
                public Object findTemplateSource(String path) throws IOException {
                    String templatePath;
                    if (path.toLowerCase().contains("content")) {
                        templatePath = currentTemplate;
                    } else {
                        templatePath = "empty.ftl";
                    }
                    try {
                        return new File(
                                this.getClass().getResource(templateFolder + templatePath).toURI());
                    } catch (URISyntaxException e) {
                        // unresolvable resource: report the template as missing
                        return null;
                    }
                }
            };
    outputFormat.templateLoader = templateLoader;
    // test request with some parameters to use in templates
    Request request = new Request();
    parameters = new HashMap<String, Object>();
    parameters.put("LAYER", "testLayer");
    Map<String, String> env = new HashMap<String, String>();
    env.put("TEST1", "VALUE1");
    env.put("TEST2", "VALUE2");
    parameters.put("ENV", env);
    request.setKvp(parameters);
    Dispatcher.REQUEST.set(request);
    final FeatureTypeInfo featureType = getFeatureTypeInfo(MockData.PRIMITIVEGEOFEATURE);
    fcType = WfsFactory.eINSTANCE.createFeatureCollectionType();
    fcType.getFeature().add(featureType.getFeatureSource(null, null).getFeatures());
    // fake layer list
    List<MapLayerInfo> queryLayers = new ArrayList<MapLayerInfo>();
    LayerInfo layerInfo = new LayerInfoImpl();
    layerInfo.setType(PublishedType.VECTOR);
    ResourceInfo resourceInfo = new FeatureTypeInfoImpl(null);
    NamespaceInfo nameSpace = new NamespaceInfoImpl();
    nameSpace.setPrefix("topp");
    nameSpace.setURI("http://www.topp.org");
    resourceInfo.setNamespace(nameSpace);
    layerInfo.setResource(resourceInfo);
    MapLayerInfo mapLayerInfo = new MapLayerInfo(layerInfo);
    queryLayers.add(mapLayerInfo);
    getFeatureInfoRequest = new GetFeatureInfoRequest();
    getFeatureInfoRequest.setQueryLayers(queryLayers);
}
// Creates and registers a GeoGig-backed datastore in the catalog — namespace,
// workspace, store, and the "Lines" feature type — sanity-checking each piece
// after it is added.
private void configureGeogigDataStore() throws Exception {
    // seed the repository with one committed feature
    helper.insertAndAdd(helper.lines1);
    helper.getGeogig().command(CommitOp.class).call();
    Catalog catalog = getCatalog();
    CatalogFactory factory = catalog.getFactory();
    NamespaceInfo ns = factory.createNamespace();
    ns.setPrefix(WORKSPACE);
    ns.setURI(NAMESPACE);
    catalog.add(ns);
    WorkspaceInfo ws = factory.createWorkspace();
    ws.setName(ns.getName());
    catalog.add(ws);
    DataStoreInfo ds = factory.createDataStore();
    ds.setEnabled(true);
    ds.setDescription("Test Geogig DataStore");
    ds.setName(STORE);
    ds.setType(GeoGigDataStoreFactory.DISPLAY_NAME);
    ds.setWorkspace(ws);
    Map<String, Serializable> connParams = ds.getConnectionParameters();
    // the REPOSITORY parameter points at the parent directory of the resolved geogig URI
    Optional<URI> geogigDir = helper.getGeogig().command(ResolveGeogigURI.class).call();
    File repositoryUrl = new File(geogigDir.get()).getParentFile();
    assertTrue(repositoryUrl.exists() && repositoryUrl.isDirectory());
    connParams.put(GeoGigDataStoreFactory.REPOSITORY.key, repositoryUrl);
    connParams.put(GeoGigDataStoreFactory.DEFAULT_NAMESPACE.key, ns.getURI());
    catalog.add(ds);
    DataStoreInfo dsInfo = catalog.getDataStoreByName(WORKSPACE, STORE);
    assertNotNull(dsInfo);
    assertEquals(GeoGigDataStoreFactory.DISPLAY_NAME, dsInfo.getType());
    DataAccess<? extends FeatureType, ? extends Feature> dataStore = dsInfo.getDataStore(null);
    assertNotNull(dataStore);
    assertTrue(dataStore instanceof GeoGigDataStore);
    // configure the feature type for the seeded "Lines" layer
    FeatureTypeInfo fti = factory.createFeatureType();
    fti.setNamespace(ns);
    fti.setCatalog(catalog);
    fti.setStore(dsInfo);
    fti.setSRS("EPSG:4326");
    fti.setName("Lines");
    fti.setAdvertised(true);
    fti.setEnabled(true);
    fti.setCqlFilter("INCLUDE");
    fti.setProjectionPolicy(ProjectionPolicy.FORCE_DECLARED);
    ReferencedEnvelope bounds =
            new ReferencedEnvelope(-180, 180, -90, 90, CRS.decode("EPSG:4326"));
    fti.setNativeBoundingBox(bounds);
    fti.setLatLonBoundingBox(bounds);
    catalog.add(fti);
    // re-read from the catalog to make sure the feature type was persisted
    fti = catalog.getFeatureType(fti.getId());
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource;
    featureSource = fti.getFeatureSource(null, null);
    assertNotNull(featureSource);
}
// Builds a LayerReference wrapping the feature source of the test vector layer.
private LayerReference vectorLayer() throws IOException {
    FeatureTypeInfo typeInfo = getCatalog().getFeatureTypeByName(VECTOR_LAYER_NAME);
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
            typeInfo.getFeatureSource(null, null);
    return new LayerReference(VECTOR_LAYER_NAME, featureSource);
}
// With FORCE_DECLARED the declared CRS (EPSG:4269) must override the native
// one on both the collection schema and the individual features.
@Test
public void testForce() throws Exception {
    FeatureTypeInfo fti =
            getCatalog().getFeatureTypeByName(MockData.BASIC_POLYGONS.getLocalPart());
    assertEquals("EPSG:4269", fti.getSRS());
    assertEquals(ProjectionPolicy.FORCE_DECLARED, fti.getProjectionPolicy());
    FeatureCollection fc = fti.getFeatureSource(null, null).getFeatures();
    assertEquals(CRS.decode("EPSG:4269"), fc.getSchema().getCoordinateReferenceSystem());
    FeatureIterator fi = fc.features();
    Feature f;
    // fix: close the iterator in a finally block so a failure in next() does not leak it
    try {
        f = fi.next();
    } finally {
        fi.close();
    }
    assertEquals(CRS.decode("EPSG:4269"), f.getType().getCoordinateReferenceSystem());
}
// Imports a CSV file into an H2 store, converting the LAT/LON attributes into
// a point geometry, and verifies the task state transitions, the resulting
// schema, and the content of the first imported feature.
public void testImportCSVIndirect() throws Exception {
    File dir = unpack("csv/locations.zip");
    String wsName = getCatalog().getDefaultWorkspace().getName();
    DataStoreInfo h2DataStore = createH2DataStore(wsName, "csvindirecttest");
    SpatialFile importData = new SpatialFile(new File(dir, "locations.csv"));
    ImportContext context = importer.createContext(importData, h2DataStore);
    assertEquals(1, context.getTasks().size());
    ImportTask task = context.getTasks().get(0);
    TransformChain transformChain = task.getTransform();
    transformChain.add(new AttributesToPointGeometryTransform("LAT", "LON"));
    // CSV has no CRS info, so the task must require one before it can run
    assertEquals(ImportTask.State.NO_CRS, task.getState());
    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    resource.setSRS("EPSG:4326");
    assertTrue("Item not ready", importer.prep(task));
    assertEquals(ImportTask.State.READY, task.getState());
    context.updated();
    assertEquals(ImportContext.State.PENDING, context.getState());
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    SimpleFeatureType featureType = (SimpleFeatureType) fti.getFeatureType();
    GeometryDescriptor geometryDescriptor = featureType.getGeometryDescriptor();
    assertNotNull("Expecting geometry", geometryDescriptor);
    assertEquals("Invalid geometry name", "location", geometryDescriptor.getLocalName());
    assertEquals(3, featureType.getAttributeCount());
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
            fti.getFeatureSource(null, null);
    FeatureCollection<? extends FeatureType, ? extends Feature> features =
            featureSource.getFeatures();
    assertEquals(9, features.size());
    FeatureIterator<? extends Feature> featureIterator = features.features();
    // fix: close the iterator in a finally block so a failed assertion does not
    // leak the underlying store connection
    try {
        assertTrue("Expected features", featureIterator.hasNext());
        SimpleFeature feature = (SimpleFeature) featureIterator.next();
        assertNotNull(feature);
        assertEquals("Invalid city attribute", "Trento", feature.getAttribute("CITY"));
        assertEquals("Invalid number attribute", 140, feature.getAttribute("NUMBER"));
        Object geomAttribute = feature.getAttribute("location");
        assertNotNull("Expected geometry", geomAttribute);
        Point point = (Point) geomAttribute;
        Coordinate coordinate = point.getCoordinate();
        assertEquals("Invalid x coordinate", 11.12, coordinate.x, 0.1);
        assertEquals("Invalid y coordinate", 46.07, coordinate.y, 0.1);
    } finally {
        featureIterator.close();
    }
}
/**
 * Checks that the given processing is visible through the security-filtered catalog.
 *
 * @param dataStore the JDBC store holding the processing table (unused here — the lookup
 *     deliberately goes through the catalog so the security layer kicks in)
 * @param processing the "id_elaborazione" value of the processing to check
 * @throws IOException if the feature source cannot be obtained
 * @throws ProcessException if the processing is filtered out of the results (not allowed)
 */
private void checkProcessingAllowed(JDBCDataStore dataStore, int processing) throws IOException {
    // access through catalog to let the security jump in
    FeatureTypeInfo ft =
            catalog.getResourceByName(
                    "destination", "siig_mtd_d_elaborazione", FeatureTypeInfo.class);
    FeatureSource source = ft.getFeatureSource(null, null);
    FeatureCollection fc =
            source.getFeatures(
                    ff2.equals(ff2.property("id_elaborazione"), ff2.literal(processing)));
    // check if the given processing is allowed (if not is filtered out from results
    // so size should be 0)
    if (fc.size() != 1) {
        throw new ProcessException("Operation not allowed");
    }
}
@Test public void testWithRename() throws Exception { FeatureTypeInfo fti = getCatalog().getFeatureTypeByName("MyPoints"); assertEquals("EPSG:4326", fti.getSRS()); assertEquals(ProjectionPolicy.REPROJECT_TO_DECLARED, fti.getProjectionPolicy()); FeatureCollection fc = fti.getFeatureSource(null, null).getFeatures(); assertEquals(CRS.decode("EPSG:4326"), fc.getSchema().getCoordinateReferenceSystem()); FeatureIterator fi = fc.features(); Feature f = fi.next(); // test that geometry was reprojected Geometry g = (Geometry) f.getDefaultGeometryProperty().getValue(); assertFalse(g.equalsExact(WKT.read("POINT(500050 500050)"))); fi.close(); assertEquals(CRS.decode("EPSG:4326"), f.getType().getCoordinateReferenceSystem()); }
@Test public void testLeaveNative() throws Exception { FeatureTypeInfo fti = getCatalog().getFeatureTypeByName(MockData.LINES.getLocalPart()); assertEquals("EPSG:3004", fti.getSRS()); assertEquals(ProjectionPolicy.NONE, fti.getProjectionPolicy()); FeatureCollection fc = fti.getFeatureSource(null, null).getFeatures(); assertEquals(CRS.decode("EPSG:32615"), fc.getSchema().getCoordinateReferenceSystem()); FeatureIterator fi = fc.features(); Feature f = fi.next(); // test that the geometry was left in tact Geometry g = (Geometry) f.getDefaultGeometryProperty().getValue(); assertTrue(g.equalsExact(WKT.read("LINESTRING(500125 500025,500175 500075)"))); fi.close(); assertEquals(CRS.decode("EPSG:32615"), f.getType().getCoordinateReferenceSystem()); }
// Verifies that IntegerFieldToDateTransform turns the integer CAT_ID column
// into a Timestamp attribute during import.
public void testIntegerToDateTransform() throws Exception {
    Catalog cat = getCatalog();
    File dir = unpack("shape/archsites_epsg_prj.zip");
    SpatialFile file = new SpatialFile(new File(dir, "archsites.shp"));
    file.prepare();
    ImportContext context = importer.createContext(file, store);
    assertEquals(1, context.getTasks().size());
    context.setTargetStore(store);
    ImportTask task = context.getTasks().get(0);
    // this is a silly test - CAT_ID ranges from 1-25 and is not supposed to be a date
    // java date handling doesn't like dates in year 1
    task.getTransform().add(new IntegerFieldToDateTransform("CAT_ID"));
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo ft = cat.getFeatureTypeByDataStore(store, "archsites");
    assertNotNull(ft);
    SimpleFeatureType schema = (SimpleFeatureType) ft.getFeatureType();
    // the transformed column must now bind to Timestamp
    assertEquals(Timestamp.class, schema.getDescriptor("CAT_ID").getType().getBinding());
    FeatureIterator it = ft.getFeatureSource(null, null).getFeatures().features();
    // expected year starts at 2: the first feature (year 1) is skipped below
    int year = 2;
    Calendar cal = Calendar.getInstance();
    cal.setTimeZone(TimeZone.getTimeZone("UTC"));
    try {
        // make sure we have something
        assertTrue(it.hasNext());
        // the first date will be bogus due to java date limitation
        it.next();
        while (it.hasNext()) {
            SimpleFeature f = (SimpleFeature) it.next();
            // class will be timestamp
            cal.setTime((Date) f.getAttribute("CAT_ID"));
            assertEquals(year++, cal.get(Calendar.YEAR));
        }
    } finally {
        it.close();
    }
}
/** * Returns the collection of all values of the dimension attribute, eventually sorted if the * native capabilities allow for it * * @param typeInfo * @param dimension * @return * @throws IOException */ FeatureCollection getDimensionCollection(FeatureTypeInfo typeInfo, DimensionInfo dimension) throws IOException { // grab the feature source FeatureSource source = null; try { source = typeInfo.getFeatureSource(null, GeoTools.getDefaultHints()); } catch (IOException e) { throw new ServiceException( "Could not get the feauture source to list time info for layer " + typeInfo.getPrefixedName(), e); } // build query to grab the dimension values final Query dimQuery = new Query(source.getSchema().getName().getLocalPart()); dimQuery.setPropertyNames(Arrays.asList(dimension.getAttribute())); return source.getFeatures(dimQuery); }
// Verifies that DateFormatTransform converts the shapefile's string
// "timestamp" column into a real Date attribute during import.
public void testDateFormatTransform() throws Exception {
    Catalog catalog = getCatalog();
    File unpacked = unpack("shape/ivan.zip");
    SpatialFile shapeFile = new SpatialFile(new File(unpacked, "ivan.shp"));
    shapeFile.prepare();
    ImportContext context = importer.createContext(shapeFile, store);
    assertEquals(1, context.getTasks().size());
    context.setTargetStore(store);
    ImportTask importTask = context.getTasks().get(0);
    importTask.getTransform().add(new DateFormatTransform("timestamp", "yyyy-MM-dd HH:mm:ss.S"));
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo featureTypeInfo = catalog.getFeatureTypeByDataStore(store, "ivan");
    assertNotNull(featureTypeInfo);
    SimpleFeatureType schema = (SimpleFeatureType) featureTypeInfo.getFeatureType();
    assertTrue(
            Date.class.isAssignableFrom(schema.getDescriptor("timestamp").getType().getBinding()));
    FeatureIterator iterator =
            featureTypeInfo.getFeatureSource(null, null).getFeatures().features();
    try {
        assertTrue(iterator.hasNext());
        while (iterator.hasNext()) {
            SimpleFeature feature = (SimpleFeature) iterator.next();
            assertTrue(feature.getAttribute("timestamp") instanceof Date);
        }
    } finally {
        iterator.close();
    }
}
/** * If this layers has been setup to reproject data, skipReproject = true will disable * reprojection. This method is build especially for the rendering subsystem that should be able * to perform a full reprojection on its own, and do generalization before reprojection (thus * avoid to reproject all of the original coordinates) */ public FeatureSource<? extends FeatureType, ? extends Feature> getFeatureSource( boolean skipReproject) throws IOException { if (type != TYPE_VECTOR) { throw new IllegalArgumentException("Layer type is not vector"); } // ask for enabled() instead of isEnabled() to account for disabled resource/store if (!layerInfo.enabled()) { throw new IOException( "featureType: " + getName() + " does not have a properly configured " + "datastore"); } FeatureTypeInfo resource = (FeatureTypeInfo) layerInfo.getResource(); if (resource.getStore() == null || resource.getStore().getDataStore(null) == null) { throw new IOException( "featureType: " + getName() + " does not have a properly configured " + "datastore"); } Hints hints = new Hints(ResourcePool.REPROJECT, Boolean.valueOf(!skipReproject)); return resource.getFeatureSource(null, hints); }
// Verifies that NumberFormatTransform converts the "cat" column to Integer
// while importing the restricted shapefile.
public void testNumberFormatTransform() throws Exception {
    Catalog catalog = getCatalog();
    File unpacked = unpack("shape/restricted.zip");
    SpatialFile shapeFile = new SpatialFile(new File(unpacked, "restricted.shp"));
    shapeFile.prepare();
    ImportContext context = importer.createContext(shapeFile, store);
    assertEquals(1, context.getTasks().size());
    context.setTargetStore(store);
    ImportTask importTask = context.getTasks().get(0);
    importTask.getTransform().add(new NumberFormatTransform("cat", Integer.class));
    importer.run(context);
    assertEquals(ImportContext.State.COMPLETE, context.getState());
    FeatureTypeInfo featureTypeInfo = catalog.getFeatureTypeByDataStore(store, "restricted");
    assertNotNull(featureTypeInfo);
    SimpleFeatureType schema = (SimpleFeatureType) featureTypeInfo.getFeatureType();
    assertEquals(Integer.class, schema.getDescriptor("cat").getType().getBinding());
    FeatureIterator iterator =
            featureTypeInfo.getFeatureSource(null, null).getFeatures().features();
    try {
        assertTrue(iterator.hasNext());
        while (iterator.hasNext()) {
            SimpleFeature feature = (SimpleFeature) iterator.next();
            assertTrue(feature.getAttribute("cat") instanceof Integer);
        }
    } finally {
        iterator.close();
    }
}
// Registers a mocked FeatureTypeInfo (backed by a property file copied into the
// mock data directory) with the EasyMock catalog, wiring up every catalog
// lookup method that can return this feature type.
public MockCatalogBuilder featureType(
        final String name,
        String srs,
        ProjectionPolicy projPolicy,
        ReferencedEnvelope envelope,
        ReferencedEnvelope latLonEnvelope) {
    String ftId = newId();
    final DataStoreInfo ds = dataStores.peekLast();
    NamespaceInfo ns = namespaces.peekLast();
    final FeatureTypeInfo ft = createNiceMock(FeatureTypeInfo.class);
    featureTypes.add(ft);
    initResource(
            ft, FeatureTypeInfo.class, ftId, name, ds, ns, srs, projPolicy, envelope, latLonEnvelope);
    expect(ft.getNumDecimals()).andReturn(8);
    // setup the property file data
    File propDir = new File(dataDirRoot, ds.getName());
    propDir.mkdirs();
    String fileName = name + ".properties";
    try {
        IOUtils.copy(getClass().getResourceAsStream(fileName), new File(propDir, fileName));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    try {
        // schema and feature source are resolved lazily against the backing datastore
        expect(ft.getFeatureType())
                .andAnswer(
                        new IAnswer<FeatureType>() {
                            @Override
                            public FeatureType answer() throws Throwable {
                                return ((DataStore) ds.getDataStore(null)).getSchema(name);
                            }
                        })
                .anyTimes();
        expect(ft.getFeatureSource(null, null))
                .andAnswer(
                        (IAnswer)
                                new IAnswer<FeatureSource>() {
                                    @Override
                                    public FeatureSource answer() throws Throwable {
                                        return ((DataStore) ds.getDataStore(null))
                                                .getFeatureSource(name);
                                    }
                                })
                .anyTimes();
    } catch (IOException e) {
        // NOTE(review): exception silently swallowed — when the datastore lookup fails
        // the two expectations above are simply not registered; confirm this is intended
    }
    expect(catalog.getFeatureTypeByName(or(eq(name), eq(ns.getPrefix() + ":" + name))))
            .andReturn(ft)
            .anyTimes();
    expect(
                    catalog.getFeatureTypeByName(
                            or(eq(new NameImpl(ns.getPrefix(), name)), eq(new NameImpl(ns.getURI(), name)))))
            .andReturn(ft)
            .anyTimes();
    expect(catalog.getFeatureTypeByName(ns, name)).andReturn(ft).anyTimes();
    expect(catalog.getFeatureTypeByName(ns.getPrefix(), name)).andReturn(ft).anyTimes();
    // expect(catalog.getFeatureTypeByName(or(eq(ns.getPrefix()), eq(ns.getURI())), name))
    // .andReturn(ft).anyTimes();
    expect(catalog.getFeatureTypeByStore(ds, name)).andReturn(ft).anyTimes();
    expect(catalog.getFeatureTypeByDataStore(ds, name)).andReturn(ft).anyTimes();
    // route visitor dispatch back to the mock
    ft.accept((CatalogVisitor) anyObject());
    expectLastCall()
            .andAnswer(
                    new VisitAnswer() {
                        @Override
                        protected void doVisit(CatalogVisitor visitor) {
                            visitor.visit(ft);
                        }
                    })
            .anyTimes();
    callback.onResource(name, ft, ds, this);
    replay(ft, createLayer(ft, name, ns));
    return this;
}
// Fires many interleaved GetFeatureInfo HTML requests against two layers whose
// template loader resolves to different templates, verifying that template
// resolution does not race across concurrent requests: layer1 must always
// render the empty template, layer2 the content template.
@SuppressWarnings("unchecked")
@Test
public void testConcurrentRequests() throws Exception {
    FeatureTypeInfo featureType1 = getFeatureTypeInfo(MockData.PRIMITIVEGEOFEATURE);
    List<MapLayerInfo> layers1 =
            Collections.singletonList(
                    new MapLayerInfo(getCatalog().getLayerByName(featureType1.prefixedName())));
    FeatureCollectionType type1 = WfsFactory.eINSTANCE.createFeatureCollectionType();
    type1.getFeature().add(featureType1.getFeatureSource(null, null).getFeatures());
    final FeatureTypeInfo featureType2 = getFeatureTypeInfo(MockData.BASIC_POLYGONS);
    List<MapLayerInfo> layers2 =
            Collections.singletonList(
                    new MapLayerInfo(getCatalog().getLayerByName(featureType2.prefixedName())));
    FeatureCollectionType type2 = WfsFactory.eINSTANCE.createFeatureCollectionType();
    type2.getFeature().add(featureType2.getFeatureSource(null, null).getFeatures());
    final HTMLFeatureInfoOutputFormat format = new HTMLFeatureInfoOutputFormat(getWMS());
    // loader that returns the real content template only for featureType2
    format.templateLoader =
            new GeoServerTemplateLoader(getClass(), getDataDirectory()) {
                @Override
                public Object findTemplateSource(String path) throws IOException {
                    String templatePath = "empty.ftl";
                    if (path.toLowerCase().contains("content")
                            && (this.resource != null)
                            && this.resource.prefixedName().equals(featureType2.prefixedName())) {
                        templatePath = "test_content.ftl";
                    }
                    try {
                        return new File(
                                this.getClass().getResource(templateFolder + templatePath).toURI());
                    } catch (URISyntaxException e) {
                        return null;
                    }
                }
            };
    int numRequests = 50;
    List<Callable<String>> tasks = new ArrayList<>(numRequests);
    for (int i = 0; i < numRequests; i++) {
        // alternate layers so concurrent calls mix the two templates
        final GetFeatureInfoRequest request = new GetFeatureInfoRequest();
        request.setQueryLayers(((i % 2) == 0) ? layers1 : layers2);
        final FeatureCollectionType type = (((i % 2) == 0) ? type1 : type2);
        tasks.add(
                new Callable<String>() {
                    @Override
                    public String call() throws Exception {
                        ByteArrayOutputStream output = new ByteArrayOutputStream();
                        format.write(type, request, output);
                        return new String(output.toByteArray());
                    }
                });
    }
    ExecutorService executor = Executors.newFixedThreadPool(8);
    try {
        List<Future<String>> futures = executor.invokeAll(tasks);
        for (int i = 0; i < numRequests; i++) {
            String info = futures.get(i).get();
            if ((i % 2) == 0) {
                // even requests hit layer1 -> empty template -> empty output
                assertEquals("", info);
            } else {
                // odd requests hit layer2 -> content template -> non-empty output
                assertNotEquals("", info);
            }
        }
    } finally {
        executor.shutdown();
    }
}