Example no. 1
0
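  /**
   * Imports a KML file into an H2 data store through the importer and verifies that the declared
   * SRS is EPSG:4326, that the native bounding box is computed during the import, and that the
   * resulting feature type exposes the expected type name and feature count.
   */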
  public void testImportKMLIndirect() throws Exception {
    File dir = unpack("kml/sample.zip");
    String wsName = getCatalog().getDefaultWorkspace().getName();
    DataStoreInfo h2DataStore = createH2DataStore(wsName, "kmltest");
    SpatialFile importData = new SpatialFile(new File(dir, "sample.kml"));
    ImportContext context = importer.createContext(importData, h2DataStore);
    assertEquals(1, context.getTasks().size());
    ImportTask task = context.getTasks().get(0);

    LayerInfo layer = task.getLayer();
    ResourceInfo resource = layer.getResource();
    assertEquals("Invalid srs", "EPSG:4326", resource.getSRS());
    ReferencedEnvelope emptyBounds = new ReferencedEnvelope();
    emptyBounds.setToNull();
    assertTrue("Unexpected bounding box", emptyBounds.equals(resource.getNativeBoundingBox()));
    // add a transform that limits the length of the description attribute,
    // otherwise the import fails with a SQL exception
    TransformChain transformChain = task.getTransform();
    transformChain.add(new DescriptionLimitingTransform());
    importer.run(context);
    Exception error = task.getError();
    if (error != null) {
      error.printStackTrace();
      fail(error.getMessage());
    }
    assertFalse("Bounding box not updated", emptyBounds.equals(resource.getNativeBoundingBox()));
    FeatureTypeInfo fti = (FeatureTypeInfo) resource;
    assertEquals("Invalid type name", "sample", fti.getName());
    FeatureSource<? extends FeatureType, ? extends Feature> featureSource =
        fti.getFeatureSource(null, null);
    assertEquals("Unexpected feature count", 20, featureSource.getCount(Query.ALL));
  }
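
  /**
   * Adds a per-layer sitemap entry to the global sitemap index for every feature type whose
   * metadata has indexing enabled.
   */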
  private void buildGlobalSiteMap(Element sitemapindex) {
    for (FeatureTypeInfo ft : getData().getFeatureTypes()) {
      try {
        if (Boolean.TRUE.equals(ft.getMetadata().get("indexingEnabled"))) {
          String ftSitemap = GEOSERVER_ROOT + "/layers/" + ft.getName() + "/sitemap.xml";

          addSitemap(sitemapindex, ftSitemap);
        }
      } catch (Exception e) {
        // skip this feature type if its sitemap entry cannot be built
      }
    }
  }
Example no. 3
0
  /** Reads the catalog from disk. */
  Catalog readCatalog(XStreamPersister xp) throws Exception {
    CatalogImpl catalog = new CatalogImpl();
    catalog.setResourceLoader(resourceLoader);
    xp.setCatalog(catalog);
    xp.setUnwrapNulls(false);

    CatalogFactory factory = catalog.getFactory();

    // global styles
    loadStyles(resourceLoader.find("styles"), catalog, xp);

    // workspaces, stores, and resources
    File workspaces = resourceLoader.find("workspaces");
    if (workspaces != null) {
      // first read the default workspace marker file (default.xml), if present
      File dws = new File(workspaces, "default.xml");
      WorkspaceInfo defaultWorkspace = null;
      if (dws.exists()) {
        try {
          defaultWorkspace = depersist(xp, dws, WorkspaceInfo.class);
          LOGGER.info("Loaded default workspace " + defaultWorkspace.getName());
        } catch (Exception e) {
          LOGGER.log(Level.WARNING, "Failed to load default workspace", e);
        }
      } else {
        LOGGER.warning("No default workspace was found.");
      }

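      // load each workspace together with its namespace and workspace-local styles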
      for (File wsd : list(workspaces, DirectoryFileFilter.INSTANCE)) {
        File f = new File(wsd, "workspace.xml");
        if (!f.exists()) {
          continue;
        }

        WorkspaceInfo ws = null;
        try {
          ws = depersist(xp, f, WorkspaceInfo.class);
          catalog.add(ws);
        } catch (Exception e) {
          LOGGER.log(Level.WARNING, "Failed to load workspace '" + wsd.getName() + "'", e);
          continue;
        }

        LOGGER.info("Loaded workspace '" + ws.getName() + "'");

        // load the namespace
        File nsf = new File(wsd, "namespace.xml");
        NamespaceInfo ns = null;
        if (nsf.exists()) {
          try {
            ns = depersist(xp, nsf, NamespaceInfo.class);
            catalog.add(ns);
          } catch (Exception e) {
            LOGGER.log(Level.WARNING, "Failed to load namespace for '" + wsd.getName() + "'", e);
          }
        }

        // set the default workspace; this value might be null when the data directory comes from
        // a 2.0.0 install. See http://jira.codehaus.org/browse/GEOS-3440
        if (defaultWorkspace != null) {
          if (ws.getName().equals(defaultWorkspace.getName())) {
            catalog.setDefaultWorkspace(ws);
            if (ns != null) {
              catalog.setDefaultNamespace(ns);
            }
          }
        } else {
          // create the default.xml file
          defaultWorkspace = catalog.getDefaultWorkspace();
          if (defaultWorkspace != null) {
            try {
              persist(xp, defaultWorkspace, dws);
            } catch (Exception e) {
              LOGGER.log(
                  Level.WARNING,
                  "Failed to persist default workspace '" + defaultWorkspace.getName() + "'",
                  e);
            }
          }
        }

        // load the styles for the workspace
        File styles = resourceLoader.find(wsd, "styles");
        if (styles != null) {
          loadStyles(styles, catalog, xp);
        }
      }

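      // second pass: load the stores of each workspace, along with their resources, layers and
      // layer groups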
      for (File wsd : list(workspaces, DirectoryFileFilter.INSTANCE)) {

        // load the stores for this workspace
        for (File sd : list(wsd, DirectoryFileFilter.INSTANCE)) {
          File f = new File(sd, "datastore.xml");
          if (f.exists()) {
            // load as a datastore
            DataStoreInfo ds = null;
            try {
              ds = depersist(xp, f, DataStoreInfo.class);
              catalog.add(ds);

              LOGGER.info("Loaded data store '" + ds.getName() + "'");

              if (ds.isEnabled()) {
                // connect to the datastore to determine if we should disable it
                try {
                  ds.getDataStore(null);
                } catch (Throwable t) {
                  LOGGER.warning("Error connecting to '" + ds.getName() + "'. Disabling.");
                  LOGGER.log(Level.INFO, "", t);

                  ds.setError(t);
                  ds.setEnabled(false);
                }
              }
            } catch (Exception e) {
              LOGGER.log(Level.WARNING, "Failed to load data store '" + sd.getName() + "'", e);
              continue;
            }

            // load feature types
            for (File ftd : list(sd, DirectoryFileFilter.INSTANCE)) {
              f = new File(ftd, "featuretype.xml");
              if (f.exists()) {
                FeatureTypeInfo ft = null;
                try {
                  ft = depersist(xp, f, FeatureTypeInfo.class);
                } catch (Exception e) {
                  LOGGER.log(
                      Level.WARNING, "Failed to load feature type '" + ftd.getName() + "'", e);
                  continue;
                }

                catalog.add(ft);

                LOGGER.info("Loaded feature type '" + ds.getName() + "'");

                f = new File(ftd, "layer.xml");
                if (f.exists()) {
                  try {
                    LayerInfo l = depersist(xp, f, LayerInfo.class);
                    catalog.add(l);

                    LOGGER.info("Loaded layer '" + l.getName() + "'");
                  } catch (Exception e) {
                    LOGGER.log(
                        Level.WARNING,
                        "Failed to load layer for feature type '" + ft.getName() + "'",
                        e);
                  }
                }
              } else {
                LOGGER.warning("Ignoring feature type directory " + ftd.getAbsolutePath());
              }
            }
          } else {
            // look for a coverage store
            f = new File(sd, "coveragestore.xml");
            if (f.exists()) {
              CoverageStoreInfo cs = null;
              try {
                cs = depersist(xp, f, CoverageStoreInfo.class);
                catalog.add(cs);

                LOGGER.info("Loaded coverage store '" + cs.getName() + "'");
              } catch (Exception e) {
                LOGGER.log(
                    Level.WARNING, "Failed to load coverage store '" + sd.getName() + "'", e);
                continue;
              }

              // load coverages
              for (File cd : list(sd, DirectoryFileFilter.INSTANCE)) {
                f = new File(cd, "coverage.xml");
                if (f.exists()) {
                  CoverageInfo c = null;
                  try {
                    c = depersist(xp, f, CoverageInfo.class);
                    catalog.add(c);

                    LOGGER.info("Loaded coverage '" + cs.getName() + "'");
                  } catch (Exception e) {
                    LOGGER.log(Level.WARNING, "Failed to load coverage '" + cd.getName() + "'", e);
                    continue;
                  }

                  f = new File(cd, "layer.xml");
                  if (f.exists()) {
                    try {
                      LayerInfo l = depersist(xp, f, LayerInfo.class);
                      catalog.add(l);

                      LOGGER.info("Loaded layer '" + l.getName() + "'");
                    } catch (Exception e) {
                      LOGGER.log(
                          Level.WARNING,
                          "Failed to load layer for coverage '" + c.getName() + "'",
                          e);
                    }
                  }
                } else {
                  LOGGER.warning("Ignoring coverage directory " + cd.getAbsolutePath());
                }
              }
            } else {
              f = new File(sd, "wmsstore.xml");
              if (f.exists()) {
                WMSStoreInfo wms = null;
                try {
                  wms = depersist(xp, f, WMSStoreInfo.class);
                  catalog.add(wms);

                  LOGGER.info("Loaded wmsstore '" + wms.getName() + "'");
                } catch (Exception e) {
                  LOGGER.log(Level.WARNING, "Failed to load wms store '" + sd.getName() + "'", e);
                  continue;
                }

                // load wms layers
                for (File cd : list(sd, DirectoryFileFilter.INSTANCE)) {
                  f = new File(cd, "wmslayer.xml");
                  if (f.exists()) {
                    WMSLayerInfo wl = null;
                    try {
                      wl = depersist(xp, f, WMSLayerInfo.class);
                      catalog.add(wl);

                      LOGGER.info("Loaded wms layer'" + wl.getName() + "'");
                    } catch (Exception e) {
                      LOGGER.log(
                          Level.WARNING, "Failed to load wms layer '" + cd.getName() + "'", e);
                      continue;
                    }

                    f = new File(cd, "layer.xml");
                    if (f.exists()) {
                      try {
                        LayerInfo l = depersist(xp, f, LayerInfo.class);
                        catalog.add(l);

                        LOGGER.info("Loaded layer '" + l.getName() + "'");
                      } catch (Exception e) {
                        LOGGER.log(
                            Level.WARNING,
                            "Failed to load cascaded wms layer '" + wl.getName() + "'",
                            e);
                      }
                    }
                  } else {
                    LOGGER.warning("Ignoring coverage directory " + cd.getAbsolutePath());
                  }
                }
              } else if (!isConfigDirectory(sd)) {
                LOGGER.warning("Ignoring store directory '" + sd.getName() + "'");
                continue;
              }
            }
          }
        }

        // load the layer groups for this workspace
        File layergroups = resourceLoader.find(wsd, "layergroups");
        if (layergroups != null) {
          loadLayerGroups(layergroups, catalog, xp);
        }
      }
    } else {
      LOGGER.warning("No 'workspaces' directory found, unable to load any stores.");
    }

    // namespaces are loaded together with their workspaces above

    // layergroups
    File layergroups = resourceLoader.find("layergroups");
    if (layergroups != null) {
      loadLayerGroups(layergroups, catalog, xp);
    }
    xp.setUnwrapNulls(true);
    catalog.resolve();
    return catalog;
  }
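
  /**
   * Encodes an array of FeatureDiffReader results as a WFS transaction document: the diffs are
   * converted into a TransactionType, schema locations are declared for the WFS schema and for
   * every feature type namespace involved, and the diff readers are closed once encoding is done.
   */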
  public void write(Object value, OutputStream output, Operation operation)
      throws IOException, ServiceException {
    final FeatureDiffReader[] diffReaders = (FeatureDiffReader[]) value;

    // create a new feature collection type with just the numbers
    VersioningTransactionConverter converter = new VersioningTransactionConverter();
    final TransactionType transaction = converter.convert(diffReaders, TransactionType.class);

    // declare wfs schema location
    BaseRequestType gft = (BaseRequestType) operation.getParameters()[0];

    Encoder encoder = new Encoder(configuration, configuration.schema());
    encodeWfsSchemaLocation(encoder, gft.getBaseUrl());

    encoder.setIndenting(true);
    encoder.setEncoding(Charset.forName(geoServer.getSettings().getCharset()));

    // set up schema locations
    // round up the info objects for each feature collection
    HashMap /* <String,Set> */ ns2metas = new HashMap();

    for (int i = 0; i < diffReaders.length; i++) {
      final FeatureDiffReader diffReader = diffReaders[i];
      final SimpleFeatureType featureType = diffReader.getSchema();

      // load the metadata for the feature type
      String namespaceURI = featureType.getName().getNamespaceURI();
      FeatureTypeInfo meta =
          geoServer
              .getCatalog()
              .getFeatureTypeByName(namespaceURI, featureType.getName().getLocalPart());

      // add it to the map
      Set metas = (Set) ns2metas.get(namespaceURI);

      if (metas == null) {
        metas = new HashSet();
        ns2metas.put(namespaceURI, metas);
      }

      metas.add(meta);
    }

    // declare application schema namespaces
    for (Iterator i = ns2metas.entrySet().iterator(); i.hasNext(); ) {
      Map.Entry entry = (Map.Entry) i.next();

      String namespaceURI = (String) entry.getKey();
      Set metas = (Set) entry.getValue();

      StringBuffer typeNames = new StringBuffer();

      for (Iterator m = metas.iterator(); m.hasNext(); ) {
        FeatureTypeInfo meta = (FeatureTypeInfo) m.next();
        typeNames.append(meta.getName());

        if (m.hasNext()) {
          typeNames.append(",");
        }
      }

      // set the schema location
      encodeTypeSchemaLocation(encoder, gft.getBaseUrl(), namespaceURI, typeNames);
    }

    try {
      encoder.encode(transaction, element, output);
    } finally {
      for (int i = 0; i < diffReaders.length; i++) {
        diffReaders[i].close();
      }
    }
  }