static {
    // uncomment to turn up logging
    java.util.logging.ConsoleHandler handler = new java.util.logging.ConsoleHandler();
    handler.setLevel(java.util.logging.Level.FINE);
    org.geotools.util.logging.Logging.getLogger("org.geotools.data.solr")
            .setLevel(java.util.logging.Level.FINE);
    org.geotools.util.logging.Logging.getLogger("org.geotools.data.solr").addHandler(handler);
}
/**
 * Returns the URL to the property file that contains CRS definitions. The default implementation
 * uses the following search path:
 *
 * <ul>
 *   <li>If a value is set for the {@value #CRS_DIRECTORY_KEY} system property key, then the
 *       {@value #FILENAME} file will be searched in this directory.
 *   <li>If no value is set for the above-cited system property, or if no {@value #FILENAME} file
 *       was found in that directory, then the first {@value #FILENAME} file found in any {@code
 *       org/geotools/referencing/factory/epsg} directory on the classpath will be used.
 *   <li>If no file is found on the classpath either, then this factory will be disabled.
 * </ul>
 *
 * @return The URL, or {@code null} if none.
 */
protected URL getDefinitionsURL() {
    try {
        if (directory != null) {
            final File file = new File(directory, FILENAME);
            if (file.isFile()) {
                return file.toURI().toURL();
            }
        }
    } catch (SecurityException exception) {
        Logging.unexpectedException(LOGGER, exception);
    } catch (MalformedURLException exception) {
        Logging.unexpectedException(LOGGER, exception);
    }
    return FactoryUsingWKT.class.getResource(FILENAME);
}
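The lookup order implemented above (explicit directory taken from a system property, then the classpath, then give up) can be sketched standalone. The property key and file name below are illustrative placeholders, not the real CRS_DIRECTORY_KEY and FILENAME constants.

import java.io.File;
import java.net.MalformedURLException;
import java.net.URL;

/** Minimal sketch of the directory-then-classpath resolution used by getDefinitionsURL(). */
public class DefinitionsLookupSketch {
    // Hypothetical names; the real values come from the CRS_DIRECTORY_KEY and FILENAME constants.
    private static final String DIRECTORY_KEY = "crs.directory";
    private static final String FILE_NAME = "epsg.properties";

    public static URL resolve() {
        String directory = System.getProperty(DIRECTORY_KEY);
        if (directory != null) {
            File candidate = new File(directory, FILE_NAME);
            if (candidate.isFile()) {
                try {
                    return candidate.toURI().toURL();
                } catch (MalformedURLException e) {
                    // fall through to the classpath lookup, mirroring the method above
                }
            }
        }
        // Classpath fallback; returns null when nothing is found, which disables the factory.
        return DefinitionsLookupSketch.class.getResource(FILE_NAME);
    }
}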
/**
 * This utility reads in XML requests and returns them as appropriate request objects.
 *
 * @author Rob Hranac, TOPP
 * @author Chris Holmes, TOPP
 * @author Gabriel Roldán
 * @version $Id$
 */
public abstract class XmlRequestReader {
    /** Class logger */
    protected static Logger LOGGER =
            org.geotools.util.logging.Logging.getLogger("org.vfny.geoserver.requests.readers");

    /** The config of the service handling the request */
    private ServiceInfo serviceConfig;

    /**
     * Reads the XML request from the supplied reader and turns it into a request object.
     *
     * @param reader the reader providing the XML request body
     * @param req the originating servlet request
     * @return the parsed request object
     * @throws ServiceException if the request cannot be parsed
     */
    public abstract Request read(Reader reader, HttpServletRequest req) throws ServiceException;

    /**
     * Creates a new XmlRequestReader.
     *
     * @param service The config of the service handling the request
     */
    public XmlRequestReader(ServiceInfo service) {
        this.serviceConfig = service;
    }

    public ServiceInfo getService() {
        return serviceConfig;
    }
}
public class ReflectiveRegionatingStrategyFactory implements RegionatingStrategyFactory {
    private static final Logger LOGGER =
            org.geotools.util.logging.Logging.getLogger("org.geoserver.geosearch");

    String myName;
    String myClassName;
    Class myStrategyClass;
    GeoServer gs;

    public ReflectiveRegionatingStrategyFactory(String name, String className, GeoServer gs) {
        myName = name;
        myClassName = className;
        this.gs = gs;
    }

    public ReflectiveRegionatingStrategyFactory(String name, Class strategy, GeoServer gs) {
        myName = name;
        myStrategyClass = strategy;
        this.gs = gs;
    }

    public boolean canHandle(String strategyName) {
        return (myName != null) && myName.equalsIgnoreCase(strategyName);
    }

    public String getName() {
        return myName;
    }

    public RegionatingStrategy createStrategy() {
        try {
            Class clazz = getStrategyClass();
            try {
                // prefer a constructor taking the GeoServer configuration
                Constructor c = clazz.getConstructor(GeoServer.class);
                return (RegionatingStrategy) c.newInstance(gs);
            } catch (NoSuchMethodException e) {
                // getConstructor throws rather than returning null, so fall back explicitly
                // to the no-argument constructor
                return (RegionatingStrategy) clazz.newInstance();
            }
        } catch (Exception e) {
            throw new ServiceException(e);
        }
    }

    protected Class getStrategyClass() {
        if (myStrategyClass != null) return myStrategyClass;

        try {
            myStrategyClass = Class.forName(myClassName);
        } catch (Exception e) {
            LOGGER.log(
                    Level.SEVERE,
                    "Failed to find class " + myClassName + " for ReflectiveRegionatingStrategy.",
                    e);
        }

        return myStrategyClass;
    }
}
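The constructor-preference pattern above is easy to get wrong because Class.getConstructor throws NoSuchMethodException instead of returning null. Here is a self-contained sketch of the same idea using only JDK types; the names are made up for illustration and do not come from GeoServer.

import java.lang.reflect.Constructor;

public class ReflectiveFactorySketch {
    /** Builds an instance, preferring a one-argument constructor and falling back to the default one. */
    static <T> T build(Class<T> type, Object configArg) throws Exception {
        try {
            Constructor<T> c = type.getConstructor(configArg.getClass());
            return c.newInstance(configArg);
        } catch (NoSuchMethodException e) {
            // no matching one-argument constructor: fall back to the no-argument constructor
            return type.getDeclaredConstructor().newInstance();
        }
    }

    public static void main(String[] args) throws Exception {
        // StringBuilder has a (String) constructor, so the first call uses it; the second call
        // finds no (Integer) constructor and takes the fallback path instead.
        StringBuilder fromArg = build(StringBuilder.class, "seed");
        StringBuilder fromDefault = build(StringBuilder.class, Integer.valueOf(1));
        System.out.println(fromArg + " / length " + fromDefault.length());
    }
}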
/** * Main class used to handle Execute requests * * @author Lucas Reed, Refractions Research Inc * @author Andrea Aime, OpenGeo */ public class Execute { static final Logger LOGGER = Logging.getLogger(Execute.class); int connectionTimeout; WPSInfo wps; ApplicationContext context; WPSExecutionManager executionManager; public Execute(WPSExecutionManager executionManager, ApplicationContext context) { this.context = context; this.executionManager = executionManager; } /** * Main method for performing decoding, execution, and response * * @param object * @param output * @throws IllegalArgumentException */ public ExecuteResponseType run(ExecuteType execute) { ResponseDocumentType responseDocument = null; OutputDefinitionType rawDataOutput = null; if (execute.getResponseForm() != null) { responseDocument = execute.getResponseForm().getResponseDocument(); rawDataOutput = execute.getResponseForm().getRawDataOutput(); } if (responseDocument != null && rawDataOutput != null) { throw new WPSException( "Invalid request, only one of the raw data output or the " + "response document should be specified in the request"); } ExecuteRequest request = new ExecuteRequest(execute); // TODO: get the startup time from the execution status ExecuteResponseBuilder builder = new ExecuteResponseBuilder(execute, context, new Date()); String executionId = executionManager.submit(request, !request.isAsynchronous()); builder.setExecutionId(executionId); if (!request.isAsynchronous()) { try { Map<String, Object> outputs = executionManager.getOutput(executionId, -1); builder.setOutputs(outputs); } catch (Exception e) { LOGGER.log(Level.SEVERE, "Process execution failed", e); builder.setException(e); } } return builder.build(); } }
/**
 * An implementation of {@link Format} for the JP2K format based on the MrSID driver.
 *
 * @author Daniele Romagnoli, GeoSolutions
 * @author Simone Giannecchini (simboss), GeoSolutions
 * @since 2.5.x
 * @source $URL$
 */
public final class JP2MrSIDFormat extends BaseGDALGridFormat implements Format {
    /** Logger. */
    private static final Logger LOGGER =
            org.geotools.util.logging.Logging.getLogger("org.geotools.coverageio.gdal.jp2mrsid");

    /** Creates an instance and sets the metadata. */
    public JP2MrSIDFormat() {
        super(new JP2GDALMrSidImageReaderSpi());

        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.fine("Creating a new JP2MrSIDFormat.");
        }

        setInfo();
    }

    /** Sets the metadata information. */
    protected void setInfo() {
        HashMap<String, String> info = new HashMap<String, String>();
        info.put("name", "JP2MrSID");
        info.put("description", "JP2K (MrSID) Coverage Format");
        info.put("vendor", "Geotools");
        info.put("docURL", ""); // TODO: set something
        info.put("version", "1.0");
        mInfo = info;

        // writing parameters
        writeParameters = null;

        // reading parameters
        readParameters =
                new ParameterGroup(
                        new DefaultParameterDescriptorGroup(
                                mInfo,
                                new GeneralParameterDescriptor[] {
                                    READ_GRIDGEOMETRY2D,
                                    USE_JAI_IMAGEREAD,
                                    USE_MULTITHREADING,
                                    SUGGESTED_TILE_SIZE
                                }));
    }

    /** @see org.geotools.data.coverage.grid.AbstractGridFormat#getReader(Object, Hints) */
    public JP2MrSIDReader getReader(Object source, Hints hints) {
        try {
            return new JP2MrSIDReader(source, hints);
        } catch (MismatchedDimensionException e) {
            final RuntimeException re = new RuntimeException();
            re.initCause(e);
            throw re;
        } catch (DataSourceException e) {
            final RuntimeException re = new RuntimeException();
            re.initCause(e);
            throw re;
        }
    }
}
/** * Map response handler for JPEG image format. * * @author Simone Giannecchini * @since 1.4.x */ public final class JPEGMapResponse extends RenderedImageMapResponse { /** Logger. */ private static final Logger LOGGER = org.geotools.util.logging.Logging.getLogger(JPEGMapResponse.class.toString()); private static final boolean CODEC_LIB_AVAILABLE = PackageUtil.isCodecLibAvailable(); /** * Default capabilities for JPEG . * * <p> * * <ol> * <li>tiled = supported * <li>multipleValues = unsupported * <li>paletteSupported = false * <li>transparency = false * </ol> * * <p>We should soon support multipage tiff. */ private static MapProducerCapabilities CAPABILITIES = new MapProducerCapabilities(true, false, false, false); /** the only MIME type this map producer supports */ private static final String MIME_TYPE = "image/jpeg"; public JPEGMapResponse(WMS wms) { super(MIME_TYPE, wms); } @Override public void formatImageOutputStream( RenderedImage image, OutputStream outStream, WMSMapContext mapContext) throws IOException { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("About to write a JPEG image."); } boolean JPEGNativeAcc = wms.getJPEGNativeAcceleration() && CODEC_LIB_AVAILABLE; float quality = (100 - wms.getJpegCompression()) / 100.0f; ImageWorker iw = new ImageWorker(image); iw.writeJPEG(outStream, "JPEG", quality, JPEGNativeAcc); if (LOGGER.isLoggable(Level.FINE)) { LOGGER.fine("Writing a JPEG done!!!"); } } @Override public MapProducerCapabilities getCapabilities(String outputFormat) { return CAPABILITIES; } }
/** * Initializes GeoTools for use. This convenience method performs various tasks (more may be added * in the future), including setting up the {@linkplain java.util.logging Java logging framework} * in one of the following states: * * <p> * * <ul> * <li>If the <A HREF="http://jakarta.apache.org/commons/logging/">Commons-logging</A> framework * is available, then every logging message in the {@code org.geotools} namespace sent to * the Java {@linkplain java.util.logging.Logger logger} are redirected to Commons-logging. * <li>Otherwise if the <A HREF="http://logging.apache.org/log4j">Log4J</A> framework is * available, then every logging message in the {@code org.geotools} namespace sent to the * Java {@linkplain java.util.logging.Logger logger} are redirected to Log4J. * <li>Otherwise, the Java logging {@linkplain java.util.logging.Formatter formatter} for * console output is replaced by a {@linkplain org.geotools.util.logging.MonolineFormatter * monoline formatter}. * </ul> * * <p>In addition, the {@linkplain #getDefaultHints default hints} are initialized to the * specified {@code hints}. * * <p>Note that invoking this method is usually <strong>not</strong> needed for proper working of * the Geotools library. It is just a convenience method for overwriting some Java and Geotools * default settings in a way that seems to be common in server environment. Such overwriting may * not be wanted for every situations. * * <p>Example of typical invocation in a Geoserver environment: * * <blockquote> * * <pre> * Hints hints = new Hints(); * hints.put({@linkplain Hints#FORCE_LONGITUDE_FIRST_AXIS_ORDER}, Boolean.TRUE); * hints.put({@linkplain Hints#FORCE_AXIS_ORDER_HONORING}, "http"); * GeoTools.init(hints); * </pre> * * </blockquote> * * @param hints The hints to use. * @see Logging#setLoggerFactory(String) * @see Logging#forceMonolineConsoleOutput * @see Hints#putSystemDefault * @see #getDefaultHints */ public static void init(final Hints hints) { final Logging log = Logging.GEOTOOLS; try { log.setLoggerFactory("org.geotools.util.logging.CommonsLoggerFactory"); } catch (ClassNotFoundException commonsException) { try { log.setLoggerFactory("org.geotools.util.logging.Log4JLoggerFactory"); } catch (ClassNotFoundException log4jException) { // Nothing to do, we already tried our best. } } // If java logging is used, force monoline console output. if (log.getLoggerFactory() == null) { log.forceMonolineConsoleOutput(); } if (hints != null) { Hints.putSystemDefault(hints); // fireConfigurationChanged() is invoked in the above method call. } }
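The javadoc's "typical invocation in a Geoserver environment" example, written out as a compilable snippet. It is a minimal sketch: the import locations assume the pre-2.7-era org.geotools.factory package layout used elsewhere in these snippets.

import org.geotools.factory.GeoTools;
import org.geotools.factory.Hints;

public class GeoToolsInitExample {
    public static void main(String[] args) {
        // Force lon/lat axis order for codes requested through the "http" authority,
        // then let init() redirect java.util.logging to Commons-logging or Log4J if present.
        Hints hints = new Hints();
        hints.put(Hints.FORCE_LONGITUDE_FIRST_AXIS_ORDER, Boolean.TRUE);
        hints.put(Hints.FORCE_AXIS_ORDER_HONORING, "http");
        GeoTools.init(hints);
    }
}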
/** Constructs an input stream for an URL. */ public ImageInputStream createInputStreamInstance( final Object input, final boolean useCache, final File cacheDir) throws IOException { final URL url = (URL) input; final URLConnection connection = url.openConnection(/*proxy*/ ); // TODO: uncomment with J2SE 1.5. int retry = RETRY; InputStream stream; while (true) { try { stream = connection.getInputStream(); break; } catch (SocketException exception) { if (--retry < 0) { throw exception; } Logging.getLogger(UrlInputSpi.class).warning(exception.toString()); } /* * Failed to get the connection. After we logged a warning, wait a little bit, run * the finalization and try again. Experience suggests that running the finalizers * sometime help, but also sometime freeze the system. FinalizationStopper may help * to unfreeze the system after a timeout. */ try { Thread.sleep(1000); } catch (InterruptedException ignore) { // Someone doesn't want to let us sleep. Go back to work... } System.gc(); Thread.interrupted(); // Clears the interrupted flag. final FinalizationStopper stopper = new FinalizationStopper(4000); System.runFinalization(); stopper.cancel(); // Thread.interrupted() must be first in order to clear the flag. if (Thread.interrupted() || stopper.interrupted) { Logging.getLogger(UrlInputSpi.class).warning("System.runFinalization() was blocked."); } } return new FileCacheImageInputStream(stream, cacheDir); }
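The retry loop above (a bounded retry budget, a warning per failure, and a pause before the next attempt) is a general pattern. The following self-contained sketch shows it with plain JDK types, leaving out the finalization workaround, which is specific to the image I/O case; the retry count is illustrative.

import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.logging.Logger;

public class RetryingOpenSketch {
    private static final Logger LOGGER = Logger.getLogger(RetryingOpenSketch.class.getName());
    private static final int RETRY = 3; // illustrative retry budget

    static InputStream openWithRetry(URL url) throws IOException, InterruptedException {
        int retry = RETRY;
        while (true) {
            try {
                // re-open the connection on every attempt so a failed one is not reused
                return url.openConnection().getInputStream();
            } catch (IOException exception) {
                if (--retry < 0) {
                    throw exception; // out of attempts, surface the last failure
                }
                LOGGER.warning(exception.toString());
                Thread.sleep(1000); // brief pause before the next attempt
            }
        }
    }
}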
/**
 * DescribeLayer WMS operation default implementation.
 *
 * @author carlo cancellieri
 */
public class DescribeLayer {

    private static final Logger LOGGER = Logging.getLogger(DescribeLayer.class);

    public DescribeLayer() {}

    private final List<LayerDescription> layerDescriptions = new ArrayList<LayerDescription>();

    @SuppressWarnings({"unchecked", "rawtypes"})
    public static DescribeLayerModel run(final DescribeLayerRequest request)
            throws ServiceException {
        return new DescribeLayerModel(request);
    }
}
/** * A WFS response parser that parses server exception reports into {@link WFSException} objects. * * @author Gabriel Roldan (OpenGeo) * @version $Id$ * @since 2.6 * @source $URL$ * http://gtsvn.refractions.net/trunk/modules/plugin/wfs/src/main/java/org/geotools/data * /wfs/v1_1_0/parsers/ExceptionReportParser.java $ */ @SuppressWarnings({"nls", "unchecked"}) public class ExceptionReportParser implements WFSResponseParser { private static final Logger LOGGER = Logging.getLogger("org.geotools.data.wfs"); /** * @param wfs the {@link WFSDataStore} that sent the request * @param response a response handle to a service exception report * @return a {@link WFSException} containing the server returned exception report messages * @see WFSResponseParser#parse(WFSProtocol, WFSResponse) */ public Object parse(WFS_1_1_0_DataStore wfs, WFSResponse response) { WFSConfiguration configuration = new WFSConfiguration(); Parser parser = new Parser(configuration); InputStream responseStream = response.getInputStream(); Charset responseCharset = response.getCharacterEncoding(); Reader reader = new InputStreamReader(responseStream, responseCharset); Object parsed; try { parsed = parser.parse(reader); if (!(parsed instanceof net.opengis.ows10.ExceptionReportType)) { return new IOException("Unrecognized server error"); } } catch (Exception e) { return new WFSException("Exception parsing server exception report", e); } net.opengis.ows10.ExceptionReportType report = (ExceptionReportType) parsed; List<ExceptionType> exceptions = report.getException(); EObject originatingRequest = response.getOriginatingRequest(); StringBuilder msg = new StringBuilder("WFS returned an exception."); msg.append(" Target URL: " + response.getTargetUrl()); if (originatingRequest != null) { try { ByteArrayOutputStream out = new ByteArrayOutputStream(); WFS_1_1_0_Protocol.encode(originatingRequest, configuration, out, Charset.forName("UTF-8")); String requestStr = out.toString("UTF-8"); msg.append(". Originating request is: \n").append(requestStr).append("\n"); } catch (Exception e) { LOGGER.log(Level.FINE, "Error encoding request for exception report", e); } } WFSException result = new WFSException(msg.toString()); for (ExceptionType ex : exceptions) { result.addExceptionReport(String.valueOf(ex.getExceptionText())); } return result; } }
/**
 * An implementation of {@link Format} for the IDRISI (RST) format.
 *
 * @author Daniele Romagnoli, GeoSolutions
 * @author Simone Giannecchini (simboss), GeoSolutions
 * @since 2.5.x
 * @source $URL:
 *     http://svn.osgeo.org/geotools/trunk/modules/plugin/imageio-ext-gdal/src/main/java/org
 *     /geotools/coverageio/gdal/idrisi/IDRISIFormat.java $
 */
public final class IDRISIFormat extends BaseGDALGridFormat implements Format {
    /** Logger. */
    private static final Logger LOGGER =
            org.geotools.util.logging.Logging.getLogger(IDRISIFormat.class.toString());

    /** Creates an instance and sets the metadata. */
    public IDRISIFormat() {
        super(new ArcBinaryGridImageReaderSpi());

        if (LOGGER.isLoggable(Level.FINE)) {
            LOGGER.fine("Creating a new IDRISIFormat.");
        }

        setInfo();
    }

    /** Sets the metadata information. */
    protected void setInfo() {
        final HashMap<String, String> info = new HashMap<String, String>();
        info.put("name", "RST");
        info.put("description", "IDRISI (RST) Coverage Format");
        info.put("vendor", "Geotools");
        info.put("docURL", ""); // TODO: set something
        info.put("version", "1.0");
        mInfo = Collections.unmodifiableMap(info);

        // writing parameters
        writeParameters = null;

        // reading parameters
        readParameters = getDefaultParameterGroup(info);
    }

    /** @see org.geotools.data.coverage.grid.AbstractGridFormat#getReader(Object, Hints) */
    public IDRISIReader getReader(Object source, Hints hints) {
        try {
            return new IDRISIReader(source, hints);
        } catch (MismatchedDimensionException e) {
            final RuntimeException re = new RuntimeException();
            re.initCause(e);
            throw re;
        } catch (DataSourceException e) {
            final RuntimeException re = new RuntimeException();
            re.initCause(e);
            throw re;
        }
    }
}
static class PlacemarkNameDecorator implements KmlDecorator {

    static final Logger LOGGER = Logging.getLogger(PlacemarkNameDecorator.class);

    @Override
    public Feature decorate(Feature feature, KmlEncodingContext context) {
        Placemark pm = (Placemark) feature;

        // try with the template
        SimpleFeature sf = context.getCurrentFeature();
        String title = null;
        try {
            title = context.getTemplate().title(sf);
        } catch (IOException e) {
            String msg = "Error occurred processing 'title' template.";
            LOGGER.log(Level.WARNING, msg, e);
        }

        // if we got nothing, set the title to the ID, but also try the text symbolizers
        if (title == null || "".equals(title)) {
            title = sf.getID();
            StringBuffer label = new StringBuffer();

            for (Symbolizer sym : context.getCurrentSymbolizers()) {
                if (sym instanceof TextSymbolizer) {
                    Expression e = SLD.textLabel((TextSymbolizer) sym);
                    String value = e.evaluate(feature, String.class);

                    if ((value != null) && !"".equals(value.trim())) {
                        label.append(value);
                    }
                }
            }

            if (label.length() > 0) {
                title = label.toString();
            }
        }

        pm.setName(title);
        return pm;
    }
}
/** * Implementation of {@code fixName} method. If the context is {@code null}, then the {@linkplain * #getInitialContext GeoTools initial context} will be fetch only when first needed. */ private static String fixName(Context context, final String name, final Hints hints) { String fixed = null; if (name != null) { final StringTokenizer tokens = new StringTokenizer(name, ":/"); while (tokens.hasMoreTokens()) { final String part = tokens.nextToken(); if (fixed == null) { fixed = part; } else try { if (context == null) { context = getInitialContext(hints); } fixed = context.composeName(fixed, part); } catch (NamingException e) { Logging.unexpectedException(GeoTools.class, "fixName", e); return name; } } } return fixed; }
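fixName splits the incoming name on ':' and '/' and asks the JNDI Context to re-compose the parts in its own syntax. The sketch below isolates the tokenization step, using a plain '/' join in place of Context.composeName, whose separator depends on the actual naming service; it is an illustration, not the GeoTools implementation.

import java.util.StringTokenizer;

public class FixNameSketch {
    /** Simplified stand-in: splits on ':' and '/', then re-joins with '/'. */
    static String fixName(String name) {
        if (name == null) {
            return null;
        }
        StringBuilder fixed = null;
        StringTokenizer tokens = new StringTokenizer(name, ":/");
        while (tokens.hasMoreTokens()) {
            String part = tokens.nextToken();
            if (fixed == null) {
                fixed = new StringBuilder(part);
            } else {
                fixed.append('/').append(part); // the real code delegates to Context.composeName
            }
        }
        return fixed == null ? null : fixed.toString();
    }

    public static void main(String[] args) {
        // colon- and slash-separated names normalize to one slash-separated form
        System.out.println(fixName("jdbc:comp/env/jdbc/EPSG")); // -> jdbc/comp/env/jdbc/EPSG
    }
}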
@Override protected void setUpInternal() throws Exception { super.setUpInternal(); Map<String, String> namespaces = new HashMap<String, String>(); namespaces.put("xlink", "http://www.w3.org/1999/xlink"); namespaces.put("xsi", "http://www.w3.org/2001/XMLSchema-instance"); namespaces.put("wms", "http://www.opengis.net/wms"); namespaces.put("ows", "http://www.opengis.net/ows"); namespaces.put("ogc", "http://www.opengis.net/ogc"); namespaces.put("wfs", "http://www.opengis.net/wfs"); namespaces.put("gml", "http://www.opengis.net/gml"); namespaces.put(WCS_PREFIX, WCS_URI); NamespaceContext ctx = new SimpleNamespaceContext(namespaces); XMLUnit.setXpathNamespaceContext(ctx); Logging.getLogger("org.geoserver.ows").setLevel(Level.OFF); WMSInfo wmsInfo = getGeoServer().getService(WMSInfo.class); wmsInfo.setMaxBuffer(50); getGeoServer().save(wmsInfo); }
@Override protected void computeRect( final PlanarImage[] sources, final WritableRaster dest, final Rectangle destRect) { final PlanarImage source = sources[0]; final Rectangle bounds = destRect.intersection(source.getBounds()); if (!destRect.equals(bounds)) { // TODO: Check if this case occurs sometime, and fill pixel values if it does. // If it happen to occurs, we will need to fix other GeoTools operations // as well. Logging.getLogger(TransformException.class) .warning("Bounds mismatch: " + destRect + " and " + bounds); } WritableRectIter iterator = RectIterFactory.createWritable(dest, bounds); // TODO: Detect if source and destination rasters are the same. If they are // the same, we should skip this block. Iteration will then be faster. iterator = TransfertRectIter.create(RectIterFactory.create(source, bounds), iterator); if (!iterator.finishedBands()) { do { recode(iterator); } while (!iterator.nextBandDone()); } }
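The defensive check at the top of computeRect only fires when the requested destination rectangle spills outside the source bounds. A tiny standalone illustration of that intersection test with java.awt.Rectangle:

import java.awt.Rectangle;

public class BoundsCheckSketch {
    public static void main(String[] args) {
        Rectangle sourceBounds = new Rectangle(0, 0, 100, 100);

        Rectangle inside = new Rectangle(10, 10, 20, 20);
        // true: the destination is fully covered, so no warning would be logged
        System.out.println(inside.intersection(sourceBounds).equals(inside));

        Rectangle spilling = new Rectangle(90, 90, 20, 20);
        Rectangle clipped = spilling.intersection(sourceBounds);
        // false: destRect and bounds differ, which is the mismatch the warning above reports
        System.out.println(clipped.equals(spilling));
        System.out.println(clipped); // java.awt.Rectangle[x=90,y=90,width=10,height=10]
    }
}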
/** @author Lucas Reed, Refractions Research Inc */ public class GetCapabilities { public WPSInfo wps; ApplicationContext context; static final Logger LOGGER = Logging.getLogger(GetCapabilities.class); private static Set<Name> PROCESS_BLACKLIST = Collections.EMPTY_SET; public GetCapabilities(WPSInfo wps, ApplicationContext context) { this.wps = wps; this.context = context; } public WPSCapabilitiesType run(GetCapabilitiesType request) throws WPSException { // do the version negotiation dance List<String> provided = Collections.singletonList("1.0.0"); List<String> accepted = null; if (request.getAcceptVersions() != null) accepted = request.getAcceptVersions().getVersion(); String version = RequestUtils.getVersionOws11(provided, accepted); if (!"1.0.0".equals(version)) { throw new WPSException("Could not understand version:" + version); } // TODO: add update sequence negotiation // encode the response Wps10Factory wpsf = Wps10Factory.eINSTANCE; Ows11Factory owsf = Ows11Factory.eINSTANCE; WPSCapabilitiesType caps = wpsf.createWPSCapabilitiesType(); caps.setVersion("1.0.0"); // TODO: make configurable caps.setLang("en"); // ServiceIdentification ServiceIdentificationType si = owsf.createServiceIdentificationType(); caps.setServiceIdentification(si); si.getTitle().add(Ows11Util.languageString(wps.getTitle())); si.getAbstract().add(Ows11Util.languageString(wps.getAbstract())); KeywordsType kw = Ows11Util.keywords(wps.keywordValues()); ; if (kw != null) { si.getKeywords().add(kw); } si.setServiceType(Ows11Util.code("WPS")); si.getServiceTypeVersion().add("1.0.0"); si.setFees(wps.getFees()); if (wps.getAccessConstraints() != null) { si.getAccessConstraints().add(wps.getAccessConstraints()); } // ServiceProvider ServiceProviderType sp = owsf.createServiceProviderType(); caps.setServiceProvider(sp); // TODO: set provder name from context GeoServerInfo geoServer = wps.getGeoServer().getGlobal(); if (geoServer.getContact().getContactOrganization() != null) { sp.setProviderName(geoServer.getContact().getContactOrganization()); } else { sp.setProviderName("GeoServer"); } sp.setProviderSite(owsf.createOnlineResourceType()); sp.getProviderSite().setHref(geoServer.getOnlineResource()); sp.setServiceContact(responsibleParty(geoServer, owsf)); // OperationsMetadata OperationsMetadataType om = owsf.createOperationsMetadataType(); caps.setOperationsMetadata(om); OperationType gco = owsf.createOperationType(); gco.setName("GetCapabilities"); gco.getDCP().add(Ows11Util.dcp("wps", request)); om.getOperation().add(gco); OperationType dpo = owsf.createOperationType(); dpo.setName("DescribeProcess"); dpo.getDCP().add(Ows11Util.dcp("wps", request)); om.getOperation().add(dpo); OperationType eo = owsf.createOperationType(); eo.setName("Execute"); eo.getDCP().add(Ows11Util.dcp("wps", request)); om.getOperation().add(eo); ProcessOfferingsType po = wpsf.createProcessOfferingsType(); caps.setProcessOfferings(po); // gather the process list for (ProcessFactory pf : Processors.getProcessFactories()) { for (Name name : pf.getNames()) { if (!getProcessBlacklist().contains(name)) { ProcessBriefType p = wpsf.createProcessBriefType(); p.setProcessVersion(pf.getVersion(name)); po.getProcess().add(p); p.setIdentifier(Ows11Util.code(name)); p.setTitle(Ows11Util.languageString(pf.getTitle(name))); p.setAbstract(Ows11Util.languageString(pf.getDescription(name))); } } } // sort it ECollections.sort( po.getProcess(), new Comparator() { public int compare(Object o1, Object o2) { ProcessBriefType pb1 = (ProcessBriefType) o1; 
ProcessBriefType pb2 = (ProcessBriefType) o2; final String id1 = pb1.getIdentifier().getValue(); final String id2 = pb2.getIdentifier().getValue(); return id1.compareTo(id2); } }); LanguagesType1 languages = wpsf.createLanguagesType1(); caps.setLanguages(languages); DefaultType2 defaultLanguage = wpsf.createDefaultType2(); languages.setDefault(defaultLanguage); defaultLanguage.setLanguage("en-US"); LanguagesType supportedLanguages = wpsf.createLanguagesType(); languages.setSupported(supportedLanguages); supportedLanguages.getLanguage().add("en-US"); return caps; // Version detection and alternative invocation if being implemented. } /** * Checks if our WPS can really handle this process inputs and outputs * * @param pf * @param name * @return */ Set<Name> getProcessBlacklist() { synchronized (PROCESS_BLACKLIST) { if (PROCESS_BLACKLIST == Collections.EMPTY_SET) { Set<Name> blacklist = new HashSet<Name>(); for (ProcessFactory pf : Processors.getProcessFactories()) { int count = 0; for (Name name : pf.getNames()) { try { // check inputs for (Parameter<?> p : pf.getParameterInfo(name).values()) { List<ProcessParameterIO> ppios = ProcessParameterIO.findAll(p, context); if (ppios.isEmpty()) { LOGGER.log( Level.INFO, "Blacklisting process " + name.getURI() + " as the input " + p.key + " of type " + p.type + " cannot be handled"); blacklist.add(name); } } // check outputs for (Parameter<?> p : pf.getResultInfo(name, null).values()) { List<ProcessParameterIO> ppios = ProcessParameterIO.findAll(p, context); if (ppios.isEmpty()) { LOGGER.log( Level.INFO, "Blacklisting process " + name.getURI() + " as the output " + p.key + " of type " + p.type + " cannot be handled"); blacklist.add(name); } } } catch (Throwable t) { blacklist.add(name); } if (!blacklist.contains(name)) { count++; } } LOGGER.info("Found " + count + " bindable processes in " + pf.getTitle()); } PROCESS_BLACKLIST = blacklist; } } return PROCESS_BLACKLIST; } ResponsiblePartySubsetType responsibleParty(GeoServerInfo global, Ows11Factory f) { ResponsiblePartySubsetType rp = f.createResponsiblePartySubsetType(); return rp; } }
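getProcessBlacklist caches its result in a static field, but it synchronizes on the very reference it later reassigns, so two threads can race past each other on the first call. The sketch below shows the same lazy-cache idea with a dedicated lock object and a volatile field; plain strings stand in for process Names, and it is a pattern illustration rather than a drop-in replacement.

import java.util.Collections;
import java.util.HashSet;
import java.util.Set;

public class LazyBlacklistSketch {
    private static final Object LOCK = new Object();
    private static volatile Set<String> blacklist = null;

    static Set<String> getBlacklist() {
        Set<String> result = blacklist;
        if (result == null) {
            synchronized (LOCK) {
                if (blacklist == null) {
                    Set<String> computed = new HashSet<String>();
                    // the expensive scan of process factories would happen here
                    computed.add("unbindable:process");
                    blacklist = Collections.unmodifiableSet(computed);
                }
                result = blacklist;
            }
        }
        return result;
    }
}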
/** * <code>GeoServer</code> users/groups security service, backed by <a * href="http://www.csipiemonte.it/">CSI</a> <code>IRIDE</code> service <code>JUnit</code> Test. * * @author "Simone Cornacchia - [email protected], [email protected] * (CSI:71740)" */ @RunWith(SpringJUnit4ClassRunner.class) @ContextConfiguration( locations = { "classpath:/testContext.xml", }) @TestExecutionListeners({DependencyInjectionTestExecutionListener.class}) public final class IrideUserGroupServiceTest { /** Logger. */ private static final Logger LOGGER = Logging.getLogger(IrideRoleServiceTest.class); private static final String SAMPLE_USER_WITH_NO_ROLES = "AAAAAA00A11M000U/CSI PIEMONTE/DEMO 32/IPA/20161027103359/2/uQ4hHIMEEruA6DGThS3EuA=="; private File tempFolder; /** * Factory that creates a new, configured, {@link IrideAuthenticationProviderFactory} instance. */ @Autowired private IrideAuthenticationProviderFactory irideAuthenticationProviderFactory; /** Factory that creates a new, configured, {@link IrideRoleService} instance. */ @Autowired private IrideRoleServiceFactory irideRoleServiceFactory; /** Factory that creates a new, configured, {@link IrideRoleService} instance. */ @Autowired private IrideUserGroupServiceFactory irideUserGroupServiceFactory; private IrideSecurityProvider securityProvider; private IrideUserGroupServiceConfig config; /** @throws java.lang.Exception */ @Before public void setUp() throws Exception { this.tempFolder = File.createTempFile("iride", "test"); this.tempFolder.delete(); this.tempFolder.mkdirs(); this.irideUserGroupServiceFactory.setSecurityManager( new GeoServerSecurityManager( new GeoServerDataDirectory(new GeoServerResourceLoader(this.tempFolder)))); this.securityProvider = new IrideSecurityProvider( this.irideAuthenticationProviderFactory, this.irideRoleServiceFactory, this.irideUserGroupServiceFactory); this.config = new IrideUserGroupServiceConfig(); this.config.setName("iride"); this.config.setClassName(IrideRoleService.class.getName()); this.config.setServerURL( "http://local-applogic-nmsf2e.csi.it/pep_wsfad_nmsf_policy/services/PolicyEnforcerBase"); this.config.setApplicationName("DECSIRA"); } /** @throws java.lang.Exception */ @After public void tearDown() throws Exception { this.tempFolder.delete(); } // TODO: implement other tests /** * Test method for {@link org.geoserver.security.iride.IrideUserGroupService#canCreateStore()}. * * @throws IOException */ @Test public void testCannotCreateStore() throws IOException { LOGGER.entering(this.getClass().getName(), "testCannotCreateStore"); assertThat(false, is(this.createUserGroupService().canCreateStore())); LOGGER.exiting(this.getClass().getName(), "testCannotCreateStore"); } /** * Test method for {@link org.geoserver.security.iride.IrideUserGroupService#canCreateStore()}. * * @throws IOException */ @Test public void testCreateStoreReturnsNull() throws IOException { LOGGER.entering(this.getClass().getName(), "testCreateStoreReturnsNull"); assertThat(this.createUserGroupService().createStore(), is(nullValue())); LOGGER.exiting(this.getClass().getName(), "testCreateStoreReturnsNull"); } /** * Test method for {@link * org.geoserver.security.iride.IrideUserGroupService#createGroupObject(String, boolean)}. 
* * @throws IOException */ @Test public void testCreateUserObjectSpecializedForIride() throws IOException { LOGGER.entering(this.getClass().getName(), "testCreateUserObjectSpecializedForIride"); final String password = "******"; final boolean isEnabled = true; final GeoServerUser user = this.createUserGroupService() .createUserObject(SAMPLE_USER_WITH_NO_ROLES, password, isEnabled); LOGGER.info("User: "******"testCreateUserObjectSpecializedForIride"); } /** * Test method for {@link * org.geoserver.security.iride.IrideUserGroupService#getUserByUsername(String)}. * * @throws IOException */ @Test(expected = IllegalArgumentException.class) public void testGetUserByUsernameForSampleUserWithInvalidServerURL() throws IOException { LOGGER.entering( this.getClass().getName(), "testGetUserByUsernameForSampleUserWithInvalidServerURL", new Object[] {SAMPLE_USER_WITH_NO_ROLES, this.config}); this.config.setServerURL(null); try { final GeoServerUser user = this.createUserGroupService().getUserByUsername(SAMPLE_USER_WITH_NO_ROLES); assertThat(user, is(nullValue())); } finally { LOGGER.exiting( this.getClass().getName(), "testGetUserByUsernameForSampleUserWithInvalidServerURL"); } } /** * @param string * @return * @throws IOException */ private IrideUserGroupService createUserGroupService() throws IOException { return (IrideUserGroupService) this.securityProvider.createUserGroupService(this.config); } }
/** * <code>IRIDE</code> Digital Identity utilities <code>JUnit</code> Test Case. * * @author "Simone Cornacchia - [email protected], [email protected] * (CSI:71740)" */ public final class IrideSecurityUtilsTest { /** Logger. */ private static final Logger LOGGER = Logging.getLogger(IrideSecurityUtilsTest.class); /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotInvalidIrideIdentity() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering(this.getClass().getName(), "testIsNotInvalidIrideIdentity", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(false)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotInvalidIrideIdentity"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithNullValue() { final String value = null; LOGGER.entering(this.getClass().getName(), "testIsNotValidIrideIdentityWithNullValue", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithNullValue"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithBlankValue() { final String value = BLANK; LOGGER.entering(this.getClass().getName(), "testIsNotValidIrideIdentityWithBlankValue", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithBlankValue"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithEmptyValue() { final String value = EMPTY; LOGGER.entering(this.getClass().getName(), "testIsNotValidIrideIdentityWithEmptyValue", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithEmptyValue"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithUnrecognizedValue() { final String value = "UNRECOGNIZED_VALUE"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithUnrecognizedValue", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithUnrecognizedValue"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. 
*/ @Test public void testIsNotValidIrideIdentityWithMissingCodiceFiscaleToken() { final String value = "AAAAAA00B77B000F"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingCodiceFiscaleToken", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingCodiceFiscaleToken"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithMissingNomeToken() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingNomeToken", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingNomeToken"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithMissingCognomeToken() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingCognomeToken", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingCognomeToken"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithMissingIdProviderToken() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingIdProviderToken", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingIdProviderToken"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithMissingTimestampToken() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingTimestampToken", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingTimestampToken"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. 
*/ @Test public void testIsNotValidIrideIdentityWithMissingLivelloAutenticazioneToken() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingLivelloAutenticazioneToken", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingLivelloAutenticazioneToken"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithMissingMacToken() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingMacToken", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithMissingMacToken"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithCodiceFiscaleEmpty() { final String value = "/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleEmpty", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleEmpty"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithCodiceFiscaleBlank() { final String value = BLANK + "/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleBlank", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleBlank"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormat() { final String value = "AAAAAA00011D000L/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormat", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormat"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. 
*/ @Test public void testIsNotValidIrideIdentityWithNomeEmpty() { final String value = "AAAAAA00B77B000F/" + EMPTY + "/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering(this.getClass().getName(), "testIsNotValidIrideIdentityWithNomeEmpty", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithNomeEmpty"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithNomeBlank() { final String value = "AAAAAA00B77B000F/" + BLANK + "/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering(this.getClass().getName(), "testIsNotValidIrideIdentityWithNomeBlank", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithNomeBlank"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithCognomeEmpty() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/" + EMPTY + "/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithCognomeEmpty", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithCognomeEmpty"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithCognomeBlank() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/" + BLANK + "/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithCognomeBlank", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithCognomeBlank"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithIdProviderEmpty() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/" + EMPTY + "/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithIdProviderEmpty", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithIdProviderEmpty"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. 
*/ @Test public void testIsNotValidIrideIdentityWithIdProviderBlank() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/" + BLANK + "/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithIdProviderBlank", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithIdProviderBlank"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithTimestampEmpty() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/" + EMPTY + "/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithTimestampEmpty", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithTimestampEmpty"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithTimestampBlank() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/" + BLANK + "/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithTimestampBlank", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithTimestampBlank"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithTimestampWithInvalidFormat() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/201605311139/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithTimestampWithInvalidFormat", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithTimestampWithInvalidFormat"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithLivelloAutenticazioneEmpty() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/" + EMPTY + "/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneEmpty", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneEmpty"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. 
*/ @Test public void testIsNotValidIrideIdentityWithLivelloAutenticazioneBlank() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/" + BLANK + "/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneBlank", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneBlank"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithLivelloAutenticazioneWithInvalidFormat() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/A/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneWithInvalidFormat", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneWithInvalidFormat"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithLivelloAutenticazioneValue0NotInRange() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/0/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneValue0NotInRange", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneValue0NotInRange"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithLivelloAutenticazioneValue3NotInRange() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/3/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneValue3NotInRange", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithLivelloAutenticazioneValue3NotInRange"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithMacEmpty() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/" + EMPTY; LOGGER.entering(this.getClass().getName(), "testIsNotValidIrideIdentityWithMacEmpty", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithMacEmpty"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. 
*/ @Test public void testIsNotValidIrideIdentityWithMacBlank() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/" + BLANK; LOGGER.entering(this.getClass().getName(), "testIsNotValidIrideIdentityWithMacBlank", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithMacBlank"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithMacOfInvalidLength() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g"; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithMacOfInvalidLength", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithMacOfInvalidLength"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormatAndWithNomeEmpty() { final String value = "AAAAAA00011D000L/" + EMPTY + "/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormatAndWithNomeEmpty", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormatAndWithNomeEmpty"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormatAndWithNomeBlank() { final String value = "AAAAAA00011D000L/" + BLANK + "/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormatAndWithNomeBlank", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsNotValidIrideIdentityWithCodiceFiscaleWithInvalidFormatAndWithNomeBlank"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithAllNullTokens() { final String value = null + "/" + null + "/" + null + "/" + null + "/" + null + "/" + null + "/" + null; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithAllNullTokens", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithAllNullTokens"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. 
*/ @Test public void testIsNotValidIrideIdentityWithAllEmptyTokens() { final String value = EMPTY + "/" + EMPTY + "/" + EMPTY + "/" + EMPTY + "/" + EMPTY + "/" + EMPTY + "/" + EMPTY; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithAllEmptyTokens", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithAllEmptyTokens"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isNotValidIrideIdentity(java.lang.String)}. */ @Test public void testIsNotValidIrideIdentityWithAllBlankTokens() { final String value = BLANK + "/" + BLANK + "/" + BLANK + "/" + BLANK + "/" + BLANK + "/" + BLANK + "/" + BLANK; LOGGER.entering( this.getClass().getName(), "testIsNotValidIrideIdentityWithAllBlankTokens", value); try { final boolean result = IrideSecurityUtils.isNotValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsNotValidIrideIdentityWithAllBlankTokens"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isValidIrideIdentity(java.lang.String)}. */ @Test public void testIsValidIrideIdentity() { final String value = "AAAAAA00B77B000F/CSI PIEMONTE/DEMO 20/IPA/20160531113948/2/1IQssTaf4vNMa66qU52m7g=="; LOGGER.entering(this.getClass().getName(), "testIsValidIrideIdentity", value); try { final boolean result = IrideSecurityUtils.isValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsValidIrideIdentity"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isValidIrideIdentity(java.lang.String)}. */ @Test public void testIsValidIrideIdentityWithComplexMacToken() { final String value = "AAAAAA00A11D000L/CSI PIEMONTE/DEMO 23/IPA/20150223095441/2//VZjBdhZTwU+/7AUMNSHjQ=="; LOGGER.entering( this.getClass().getName(), "testIsValidIrideIdentityWithComplexMacToken", value); try { final boolean result = IrideSecurityUtils.isValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting(this.getClass().getName(), "testIsValidIrideIdentityWithComplexMacToken"); } } /** * Test method for {@link * org.geoserver.security.iride.util.IrideSecurityUtils#isValidIrideIdentity(java.lang.String)}. */ @Test public void testIsValidIrideIdentityWithRealisticDigitalIdentity() { final String value = "NNRLSN69P26L570X/Aldesino/Innerkofler/IPA/20160531113948/2//VZjBdhZTwU+/7AU0A8HjQ=="; LOGGER.entering( this.getClass().getName(), "testIsValidIrideIdentityWithRealisticDigitalIdentity", value); try { final boolean result = IrideSecurityUtils.isValidIrideIdentity(value); assertThat(result, is(true)); } finally { LOGGER.exiting( this.getClass().getName(), "testIsValidIrideIdentityWithRealisticDigitalIdentity"); } } }
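Taken together, the test data implies an IRIDE identity string carries seven '/'-separated tokens (codice fiscale, nome, cognome, identity provider, timestamp, authentication level, MAC), and that the MAC itself may contain '/' characters, as testIsValidIrideIdentityWithComplexMacToken shows. The rough tokenization sketch below illustrates that shape only; it is not the actual IrideSecurityUtils validation logic.

public class IrideIdentityTokenizerSketch {
    /** Splits an identity into its 7 tokens; the split limit keeps any '/' inside the MAC intact. */
    static String[] tokenize(String identity) {
        if (identity == null) {
            return null;
        }
        String[] tokens = identity.split("/", 7);
        return tokens.length == 7 ? tokens : null;
    }

    public static void main(String[] args) {
        String[] tokens =
                tokenize("AAAAAA00A11D000L/CSI PIEMONTE/DEMO 23/IPA/20150223095441/2//VZjBdhZTwU+/7AUMNSHjQ==");
        // prints the MAC token with its embedded slashes preserved: /VZjBdhZTwU+/7AUMNSHjQ==
        System.out.println(tokens == null ? "invalid" : tokens[6]);
    }
}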
/** * A driver for the GeoTIFF format. * * @author Simone Giannecchini, GeoSolutions * @author Jody Garnett * @source $URL: * http://svn.osgeo.org/geotools/branches/2.7.x/build/maven/javadoc/../../../modules/unsupported/coverage-experiment/geotiff/src/main/java/org/geotools/coverage/io/geotiff/GeoTiffDriver.java * $ */ public class GeoTiffDriver extends DefaultFileDriver implements FileDriver { /** Logger. */ private static final Logger LOGGER = org.geotools.util.logging.Logging.getLogger(GeoTiffDriver.class); /** {@link Set} of supported extensions for tiff world files. */ static final Set<String> TIFF_WORLD_FILE_EXT; static final boolean JAIAvailable; static final boolean TiffAvailable; static ImageReaderSpi readerSpi; static ImageWriterSpi writerSpi; static { // check if we have JAI and or ImageIO // if these classes are here, then the runtine environment has // access to JAI and the JAI ImageI/O toolbox. boolean available = true; try { Class.forName("javax.media.jai.JAI"); } catch (Throwable e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); available = false; } JAIAvailable = available; available = true; try { Class<?> clazz = Class.forName("com.sun.media.imageioimpl.plugins.tiff.TIFFImageReaderSpi"); readerSpi = (ImageReaderSpi) clazz.newInstance(); Class<?> clazz1 = Class.forName("com.sun.media.imageioimpl.plugins.tiff.TIFFImageWriterSpi"); writerSpi = (ImageWriterSpi) clazz1.newInstance(); } catch (Throwable e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); readerSpi = null; writerSpi = null; available = false; } TiffAvailable = available; final HashSet<String> tempSet = new HashSet<String>(2); tempSet.add(".tfw"); tempSet.add(".tiffw"); tempSet.add(".wld"); TIFF_WORLD_FILE_EXT = Collections.unmodifiableSet(tempSet); } public GeoTiffDriver() { this(null); } /** Creates a new instance of GeoTiffAccessFactory. */ public GeoTiffDriver(Hints hints) { super( "GeoTIFF", "Tagged Image File Format with Geographic information", "Tagged Image File Format with Geographic information", hints, Arrays.asList("tiff", "tif")); } /** * Informs the caller whether the libraries required by the GeoTiff reader are installed or not. * * @return availability of the GeoTiff format. */ public boolean isAvailable() { return TiffAvailable; } @Override protected boolean canConnect(URL url, Map<String, Serializable> params) { if (url == null) { return false; } ImageInputStream inputStream = null; Object source = null; try { // ///////////////////////////////////////////////////////////// // // URL management // In case the URL points to a file we need to get to the file // directly and avoid caching. In case it points to http or ftp // or it is an open stream we have very small to do and we need // to enable caching. // // ///////////////////////////////////////////////////////////// if (url.getProtocol().equalsIgnoreCase("file")) { File file = urlToFile(url); if (file.exists() && file.canRead() && file.isFile()) { // setting source source = file; } else { return false; } } else if (url.getProtocol().equalsIgnoreCase("http") || url.getProtocol().equalsIgnoreCase("ftp")) { source = url.openStream(); } else { return false; } // get a stream inputStream = (ImageInputStream) ((source instanceof ImageInputStream) ? 
source : ImageIO.createImageInputStream(source)); if (inputStream == null) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.fine("Unable to get an ImageInputStream"); return false; } // get a reader and check if it is a geotiff inputStream.mark(); // tiff if (!readerSpi.canDecodeInput(inputStream)) { return false; } return true; } catch (Throwable e) { if (LOGGER.isLoggable(Level.FINE)) { LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); } return false; } finally { if (inputStream != null) { try { inputStream.close(); } catch (Exception e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); } } if (source != null && (source instanceof InputStream)) { try { ((InputStream) source).close(); } catch (Exception e) { if (LOGGER.isLoggable(Level.FINE)) LOGGER.log(Level.FINE, e.getLocalizedMessage(), e); } } } } @Override protected CoverageAccess connect( URL source, Map<String, Serializable> params, Hints hints, ProgressListener listener) throws IOException { final GeoTiffAccess retValue = new GeoTiffAccess(this, source, params, hints, listener, false); return retValue; } @Override protected CoverageAccess create( URL source, Map<String, Serializable> params, Hints hints, ProgressListener listener) throws IOException { return new GeoTiffAccess(this, source, params, hints, listener, true); } /** GeoTiffDriver supports the creation of new files. */ @Override protected boolean canCreate(URL url, Map<String, Serializable> params) { File file = toFile(url); if (file == null) { return false; // not a file } // check if we are trying to create a new geotiff if (file.exists()) { return false; } else { // if it does not exist let's see if we could ever create it // the best way to check I came up with has been final File parent = file.getParentFile(); return parent != null && parent.isDirectory() && parent.canWrite(); } } /** * Subclass can override to describe the parameters required to create a new Covearge. * * @return */ protected Map<String, Parameter<?>> defineCreateParameterInfo() { HashMap<String, Parameter<?>> info = new HashMap<String, Parameter<?>>(); info.put(URL.key, URL); return info; } public EnumSet<DriverOperation> getDriverCapabilities() { return EnumSet.of(DriverOperation.CONNECT, DriverOperation.CREATE); } }
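// ---------------------------------------------------------------------------------------------
// Standalone sketch of the reflective availability probe used in GeoTiffDriver's static
// initializer above. The class names probed are the ones the driver itself looks up; the
// surrounding class and method are illustrative only.
final class ImagingAvailabilityProbe {

    private ImagingAvailabilityProbe() {}

    /** Returns true if the named class can be loaded, treating any linkage problem as "absent". */
    static boolean isOnClasspath(String className) {
        try {
            Class.forName(className);
            return true;
        } catch (Throwable t) {
            // ClassNotFoundException, NoClassDefFoundError, ... all simply mean "not available"
            return false;
        }
    }

    public static void main(String[] args) {
        boolean jaiAvailable = isOnClasspath("javax.media.jai.JAI");
        boolean tiffAvailable =
                isOnClasspath("com.sun.media.imageioimpl.plugins.tiff.TIFFImageReaderSpi")
                        && isOnClasspath("com.sun.media.imageioimpl.plugins.tiff.TIFFImageWriterSpi");
        System.out.println("JAI available:       " + jaiAvailable);
        System.out.println("TIFF SPIs available: " + tiffAvailable);
    }
}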
/** * Initializes GeoServer configuration and catalog on startup. * * <p>This class post processes the singleton beans {@link Catalog} and {@link GeoServer}, * populating them from stored configuration. * * @author Justin Deoliveira, The Open Planning Project */ public abstract class GeoServerLoader { static Logger LOGGER = Logging.getLogger("org.geoserver"); protected GeoServerResourceLoader resourceLoader; GeoServer geoserver; XStreamPersisterFactory xpf = new XStreamPersisterFactory(); // JD: this is a hack for the moment, it is used only to maintain tests since the test setup // relies // on the old data directory structure, once the tests have been ported to the new structure // this ugly hack can die static boolean legacy = false; public GeoServerLoader(GeoServerResourceLoader resourceLoader) { this.resourceLoader = resourceLoader; } public void setApplicationContext(ApplicationContext applicationContext) throws BeansException { GeoserverDataDirectory.init((WebApplicationContext) applicationContext); } public void setXStreamPeristerFactory(XStreamPersisterFactory xpf) { this.xpf = xpf; } public static void setLegacy(boolean legacy) { GeoServerLoader.legacy = legacy; } public final Object postProcessAfterInitialization(Object bean, String beanName) throws BeansException { return bean; } public final Object postProcessBeforeInitialization(Object bean, String beanName) throws BeansException { if (bean instanceof Catalog) { // ensure this is not a wrapper but the real deal if (bean instanceof Wrapper && ((Wrapper) bean).isWrapperFor(Catalog.class)) { return bean; } // load try { Catalog catalog = (Catalog) bean; XStreamPersister xp = xpf.createXMLPersister(); xp.setCatalog(catalog); loadCatalog(catalog, xp); // initialize styles initializeStyles(catalog, xp); } catch (Exception e) { throw new RuntimeException(e); } } if (bean instanceof GeoServer) { geoserver = (GeoServer) bean; try { XStreamPersister xp = xpf.createXMLPersister(); xp.setCatalog(geoserver.getCatalog()); loadGeoServer(geoserver, xp); // load initializers loadInitializers(geoserver); } catch (Exception e) { throw new RuntimeException(e); } // initialize(); } return bean; } protected abstract void loadCatalog(Catalog catalog, XStreamPersister xp) throws Exception; protected abstract void loadGeoServer(final GeoServer geoServer, XStreamPersister xp) throws Exception; protected void loadInitializers(GeoServer geoServer) throws Exception { // load initializer extensions List<GeoServerInitializer> initializers = GeoServerExtensions.extensions(GeoServerInitializer.class); for (GeoServerInitializer initer : initializers) { try { initer.initialize(geoServer); } catch (Throwable t) { LOGGER.log(Level.SEVERE, "Failed to run initializer " + initer, t); } } } /** * Does some post processing on the catalog to ensure that the "well-known" styles are always * around. 
*/ protected void initializeStyles(Catalog catalog, XStreamPersister xp) throws IOException { if (catalog.getStyleByName(StyleInfo.DEFAULT_POINT) == null) { initializeStyle(catalog, StyleInfo.DEFAULT_POINT, "default_point.sld"); } if (catalog.getStyleByName(StyleInfo.DEFAULT_LINE) == null) { initializeStyle(catalog, StyleInfo.DEFAULT_LINE, "default_line.sld"); } if (catalog.getStyleByName(StyleInfo.DEFAULT_POLYGON) == null) { initializeStyle(catalog, StyleInfo.DEFAULT_POLYGON, "default_line.sld"); } if (catalog.getStyleByName(StyleInfo.DEFAULT_RASTER) == null) { initializeStyle(catalog, StyleInfo.DEFAULT_RASTER, "default_raster.sld"); } } /** Copies a well known style out to the data directory and adds a catalog entry for it. */ void initializeStyle(Catalog catalog, String styleName, String sld) throws IOException { // copy the file out to the data directory if necessary if (resourceLoader.find("styles", sld) == null) { FileUtils.copyURLToFile( GeoServerLoader.class.getResource(sld), new File(resourceLoader.findOrCreateDirectory("styles"), sld)); } // create a style for it StyleInfo s = catalog.getFactory().createStyle(); s.setName(styleName); s.setFilename(sld); catalog.add(s); } public void reload() throws Exception { destroy(); // reload catalog, make sure we reload the underlying catalog, not any wrappers Catalog catalog = geoserver.getCatalog(); if (catalog instanceof Wrapper) { catalog = ((Wrapper) geoserver.getCatalog()).unwrap(Catalog.class); } XStreamPersister xp = xpf.createXMLPersister(); xp.setCatalog(catalog); loadCatalog(catalog, xp); loadGeoServer(geoserver, xp); } protected void readCatalog(Catalog catalog, XStreamPersister xp) throws Exception { // we are going to synch up the catalogs and need to preserve listeners, // but these two fellas are attached to the new catalog as well catalog.removeListeners(ResourcePool.CacheClearingListener.class); catalog.removeListeners(GeoServerPersister.class); List<CatalogListener> listeners = new ArrayList<CatalogListener>(catalog.getListeners()); // look for catalog.xml, if it exists assume we are dealing with // an old data directory File f = resourceLoader.find("catalog.xml"); if (f == null) { // assume 2.x style data directory CatalogImpl catalog2 = (CatalogImpl) readCatalog(xp); // make to remove the old resource pool catalog listener ((CatalogImpl) catalog).sync(catalog2); } else { // import old style catalog, register the persister now so that we start // with a new version of the catalog CatalogImpl catalog2 = (CatalogImpl) readLegacyCatalog(f, xp); ((CatalogImpl) catalog).sync(catalog2); } // attach back the old listeners for (CatalogListener listener : listeners) { catalog.addListener(listener); } } /** Reads the catalog from disk. 
*/ Catalog readCatalog(XStreamPersister xp) throws Exception { CatalogImpl catalog = new CatalogImpl(); catalog.setResourceLoader(resourceLoader); xp.setCatalog(catalog); xp.setUnwrapNulls(false); CatalogFactory factory = catalog.getFactory(); // global styles loadStyles(resourceLoader.find("styles"), catalog, xp); // workspaces, stores, and resources File workspaces = resourceLoader.find("workspaces"); if (workspaces != null) { // do a first quick scan over all workspaces, setting the default File dws = new File(workspaces, "default.xml"); WorkspaceInfo defaultWorkspace = null; if (dws.exists()) { try { defaultWorkspace = depersist(xp, dws, WorkspaceInfo.class); LOGGER.info("Loaded default workspace " + defaultWorkspace.getName()); } catch (Exception e) { LOGGER.log(Level.WARNING, "Failed to load default workspace", e); } } else { LOGGER.warning("No default workspace was found."); } for (File wsd : list(workspaces, DirectoryFileFilter.INSTANCE)) { File f = new File(wsd, "workspace.xml"); if (!f.exists()) { continue; } WorkspaceInfo ws = null; try { ws = depersist(xp, f, WorkspaceInfo.class); catalog.add(ws); } catch (Exception e) { LOGGER.log(Level.WARNING, "Failed to load workspace '" + wsd.getName() + "'", e); continue; } LOGGER.info("Loaded workspace '" + ws.getName() + "'"); // load the namespace File nsf = new File(wsd, "namespace.xml"); NamespaceInfo ns = null; if (nsf.exists()) { try { ns = depersist(xp, nsf, NamespaceInfo.class); catalog.add(ns); } catch (Exception e) { LOGGER.log(Level.WARNING, "Failed to load namespace for '" + wsd.getName() + "'", e); } } // set the default workspace, this value might be null in the case of coming from a // 2.0.0 data directory. See http://jira.codehaus.org/browse/GEOS-3440 if (defaultWorkspace != null) { if (ws.getName().equals(defaultWorkspace.getName())) { catalog.setDefaultWorkspace(ws); if (ns != null) { catalog.setDefaultNamespace(ns); } } } else { // create the default.xml file defaultWorkspace = catalog.getDefaultWorkspace(); if (defaultWorkspace != null) { try { persist(xp, defaultWorkspace, dws); } catch (Exception e) { LOGGER.log( Level.WARNING, "Failed to persist default workspace '" + wsd.getName() + "'", e); } } } // load the styles for the workspace File styles = resourceLoader.find(wsd, "styles"); if (styles != null) { loadStyles(styles, catalog, xp); } } for (File wsd : list(workspaces, DirectoryFileFilter.INSTANCE)) { // load the stores for this workspace for (File sd : list(wsd, DirectoryFileFilter.INSTANCE)) { File f = new File(sd, "datastore.xml"); if (f.exists()) { // load as a datastore DataStoreInfo ds = null; try { ds = depersist(xp, f, DataStoreInfo.class); catalog.add(ds); LOGGER.info("Loaded data store '" + ds.getName() + "'"); if (ds.isEnabled()) { // connect to the datastore to determine if we should disable it try { ds.getDataStore(null); } catch (Throwable t) { LOGGER.warning("Error connecting to '" + ds.getName() + "'. 
Disabling."); LOGGER.log(Level.INFO, "", t); ds.setError(t); ds.setEnabled(false); } } } catch (Exception e) { LOGGER.log(Level.WARNING, "Failed to load data store '" + sd.getName() + "'", e); continue; } // load feature types for (File ftd : list(sd, DirectoryFileFilter.INSTANCE)) { f = new File(ftd, "featuretype.xml"); if (f.exists()) { FeatureTypeInfo ft = null; try { ft = depersist(xp, f, FeatureTypeInfo.class); } catch (Exception e) { LOGGER.log( Level.WARNING, "Failed to load feature type '" + ftd.getName() + "'", e); continue; } catalog.add(ft); LOGGER.info("Loaded feature type '" + ds.getName() + "'"); f = new File(ftd, "layer.xml"); if (f.exists()) { try { LayerInfo l = depersist(xp, f, LayerInfo.class); catalog.add(l); LOGGER.info("Loaded layer '" + l.getName() + "'"); } catch (Exception e) { LOGGER.log( Level.WARNING, "Failed to load layer for feature type '" + ft.getName() + "'", e); } } } else { LOGGER.warning("Ignoring feature type directory " + ftd.getAbsolutePath()); } } } else { // look for a coverage store f = new File(sd, "coveragestore.xml"); if (f.exists()) { CoverageStoreInfo cs = null; try { cs = depersist(xp, f, CoverageStoreInfo.class); catalog.add(cs); LOGGER.info("Loaded coverage store '" + cs.getName() + "'"); } catch (Exception e) { LOGGER.log( Level.WARNING, "Failed to load coverage store '" + sd.getName() + "'", e); continue; } // load coverages for (File cd : list(sd, DirectoryFileFilter.INSTANCE)) { f = new File(cd, "coverage.xml"); if (f.exists()) { CoverageInfo c = null; try { c = depersist(xp, f, CoverageInfo.class); catalog.add(c); LOGGER.info("Loaded coverage '" + cs.getName() + "'"); } catch (Exception e) { LOGGER.log(Level.WARNING, "Failed to load coverage '" + cd.getName() + "'", e); continue; } f = new File(cd, "layer.xml"); if (f.exists()) { try { LayerInfo l = depersist(xp, f, LayerInfo.class); catalog.add(l); LOGGER.info("Loaded layer '" + l.getName() + "'"); } catch (Exception e) { LOGGER.log( Level.WARNING, "Failed to load layer coverage '" + c.getName() + "'", e); } } } else { LOGGER.warning("Ignoring coverage directory " + cd.getAbsolutePath()); } } } else { f = new File(sd, "wmsstore.xml"); if (f.exists()) { WMSStoreInfo wms = null; try { wms = depersist(xp, f, WMSStoreInfo.class); catalog.add(wms); LOGGER.info("Loaded wmsstore '" + wms.getName() + "'"); } catch (Exception e) { LOGGER.log(Level.WARNING, "Failed to load wms store '" + sd.getName() + "'", e); continue; } // load wms layers for (File cd : list(sd, DirectoryFileFilter.INSTANCE)) { f = new File(cd, "wmslayer.xml"); if (f.exists()) { WMSLayerInfo wl = null; try { wl = depersist(xp, f, WMSLayerInfo.class); catalog.add(wl); LOGGER.info("Loaded wms layer'" + wl.getName() + "'"); } catch (Exception e) { LOGGER.log( Level.WARNING, "Failed to load wms layer '" + cd.getName() + "'", e); continue; } f = new File(cd, "layer.xml"); if (f.exists()) { try { LayerInfo l = depersist(xp, f, LayerInfo.class); catalog.add(l); LOGGER.info("Loaded layer '" + l.getName() + "'"); } catch (Exception e) { LOGGER.log( Level.WARNING, "Failed to load cascaded wms layer '" + wl.getName() + "'", e); } } } else { LOGGER.warning("Ignoring coverage directory " + cd.getAbsolutePath()); } } } else if (!isConfigDirectory(sd)) { LOGGER.warning("Ignoring store directory '" + sd.getName() + "'"); continue; } } } } // load hte layer groups for this workspace File layergroups = resourceLoader.find(wsd, "layergroups"); if (layergroups != null) { loadLayerGroups(layergroups, catalog, xp); } } } else { LOGGER.warning("No 
'workspaces' directory found, unable to load any stores."); } // namespaces // layergroups File layergroups = resourceLoader.find("layergroups"); if (layergroups != null) { loadLayerGroups(layergroups, catalog, xp); } xp.setUnwrapNulls(true); catalog.resolve(); return catalog; } /** * Some config directories in GeoServer are used to store workspace specific configurations, * identify them so that we don't log complaints about their existence * * @param f * @return */ private boolean isConfigDirectory(File dir) { String name = dir.getName(); boolean result = "styles".equals(name) || "layergroups".equals(name); return result; } /** Reads the legacy (1.x) catalog from disk. */ Catalog readLegacyCatalog(File f, XStreamPersister xp) throws Exception { Catalog catalog2 = new CatalogImpl(); catalog2.setResourceLoader(resourceLoader); // add listener now as a converter which will convert from the old style // data directory to the new GeoServerPersister p = new GeoServerPersister(resourceLoader, xp); if (!legacy) { catalog2.addListener(p); } LegacyCatalogImporter importer = new LegacyCatalogImporter(catalog2); importer.setResourceLoader(resourceLoader); importer.imprt(resourceLoader.getBaseDirectory()); if (!legacy) { catalog2.removeListener(p); } if (!legacy) { // copy files from old feature type directories to new File featureTypesDir = resourceLoader.find("featureTypes"); if (featureTypesDir != null) { LegacyCatalogReader creader = new LegacyCatalogReader(); creader.read(f); Map<String, Map<String, Object>> dataStores = creader.dataStores(); for (File featureTypeDir : featureTypesDir.listFiles()) { if (!featureTypeDir.isDirectory()) { continue; } File featureTypeInfo = new File(featureTypeDir, "info.xml"); if (!featureTypeInfo.exists()) { continue; } LegacyFeatureTypeInfoReader reader = new LegacyFeatureTypeInfoReader(); reader.read(featureTypeInfo); Map<String, Object> dataStore = dataStores.get(reader.dataStore()); if (dataStore == null) { continue; } String namespace = (String) dataStore.get("namespace"); File destFeatureTypeDir = resourceLoader.find("workspaces", namespace, reader.dataStore(), reader.name()); if (destFeatureTypeDir != null) { // copy all the files over for (File file : featureTypeDir.listFiles()) { if (file.isFile() && !featureTypeInfo.equals(file)) { FileUtils.copyFile(file, new File(destFeatureTypeDir, file.getName())); } } } } } // rename catalog.xml f.renameTo(new File(f.getParentFile(), "catalog.xml.old")); } return catalog2; } protected void readConfiguration(GeoServer geoServer, XStreamPersister xp) throws Exception { // look for services.xml, if it exists assume we are dealing with // an old data directory File f = resourceLoader.find("services.xml"); if (f == null) { // assume 2.x style f = resourceLoader.find("global.xml"); if (f != null) { GeoServerInfo global = depersist(xp, f, GeoServerInfo.class); geoServer.setGlobal(global); } // load logging f = resourceLoader.find("logging.xml"); if (f != null) { LoggingInfo logging = depersist(xp, f, LoggingInfo.class); geoServer.setLogging(logging); } // load workspace specific settings File workspaces = resourceLoader.find("workspaces"); if (workspaces != null) { for (File dir : workspaces.listFiles()) { if (!dir.isDirectory() && !dir.isHidden()) continue; f = resourceLoader.find(dir, "settings.xml"); if (f != null) { SettingsInfo settings = depersist(xp, f, SettingsInfo.class); geoServer.add(settings); } } } // load services final List<XStreamServiceLoader> loaders = 
GeoServerExtensions.extensions(XStreamServiceLoader.class); loadServices(null, loaders, geoServer); // load services specific to workspace if (workspaces != null) { for (File dir : workspaces.listFiles()) { if (!dir.isDirectory() && !dir.isHidden()) continue; loadServices(dir, loaders, geoServer); } } } else { // add listener now as a converter which will convert from the old style // data directory to the new GeoServerPersister p = new GeoServerPersister(resourceLoader, xp); geoServer.addListener(p); // import old style services.xml new LegacyConfigurationImporter(geoServer).imprt(resourceLoader.getBaseDirectory()); geoServer.removeListener(p); // rename the services.xml file f.renameTo(new File(f.getParentFile(), "services.xml.old")); } } void loadStyles(File styles, Catalog catalog, XStreamPersister xp) { for (File sf : list(styles, new SuffixFileFilter(".xml"))) { try { // handle the .xml.xml case if (new File(styles, sf.getName() + ".xml").exists()) { continue; } StyleInfo s = depersist(xp, sf, StyleInfo.class); catalog.add(s); LOGGER.info("Loaded style '" + s.getName() + "'"); } catch (Exception e) { LOGGER.log(Level.WARNING, "Failed to load style from file '" + sf.getName() + "'", e); } } } void loadLayerGroups(File layergroups, Catalog catalog, XStreamPersister xp) { for (File lgf : list(layergroups, new SuffixFileFilter(".xml"))) { try { LayerGroupInfo lg = depersist(xp, lgf, LayerGroupInfo.class); if (lg.getLayers() == null || lg.getLayers().size() == 0) { LOGGER.warning("Skipping empty layer group '" + lg.getName() + "', it is invalid"); continue; } catalog.add(lg); LOGGER.info("Loaded layer group '" + lg.getName() + "'"); } catch (Exception e) { LOGGER.log(Level.WARNING, "Failed to load layer group '" + lgf.getName() + "'", e); } } } void loadServices(File directory, List<XStreamServiceLoader> loaders, GeoServer geoServer) { for (XStreamServiceLoader<ServiceInfo> l : loaders) { try { ServiceInfo s = l.load(geoServer, directory); if (directory != null && s.getWorkspace() == null) continue; geoServer.add(s); LOGGER.info( "Loaded service '" + s.getId() + "', " + (s.isEnabled() ? "enabled" : "disabled")); } catch (Throwable t) { LOGGER.log( Level.SEVERE, "Failed to load the service configuration in directory: " + directory.getPath(), t); } } } /** Helper method which uses xstream to persist an object as xml on disk. */ void persist(XStreamPersister xp, Object obj, File f) throws Exception { BufferedOutputStream out = new BufferedOutputStream(new FileOutputStream(f)); xp.save(obj, out); out.flush(); out.close(); } /** Helper method which uses xstream to depersist an object as xml from disk. */ <T> T depersist(XStreamPersister xp, File f, Class<T> clazz) throws IOException { BufferedInputStream in = new BufferedInputStream(new FileInputStream(f)); try { return xp.load(in, clazz); } finally { in.close(); } } /** Helper method for listing files in a directory. */ Collection<File> list(File d, IOFileFilter filter) { if (d == null) { return Collections.EMPTY_LIST; } ArrayList<File> files = new ArrayList(); for (File f : d.listFiles()) { if (filter.accept(f)) { files.add(f); } } return files; } public void destroy() throws Exception { // dispose geoserver.dispose(); } }
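// ---------------------------------------------------------------------------------------------
// Generic sketch of the "depersist each file, log and continue on failure" pattern that
// GeoServerLoader's loadStyles()/loadLayerGroups() follow above, using only the JDK.
// The Depersister interface and class names are hypothetical, not GeoServer API; only the
// overall control flow mirrors the loader.
import java.io.File;
import java.io.FileFilter;
import java.util.ArrayList;
import java.util.List;
import java.util.logging.Level;
import java.util.logging.Logger;

final class LenientDirectoryLoader {

    private static final Logger LOGGER = Logger.getLogger(LenientDirectoryLoader.class.getName());

    /** Hypothetical stand-in for XStreamPersister.load(InputStream, Class). */
    interface Depersister<T> {
        T load(File file) throws Exception;
    }

    /** Loads every *.xml file in the directory, skipping (and logging) the ones that fail. */
    static <T> List<T> loadAll(File directory, Depersister<T> depersister) {
        List<T> loaded = new ArrayList<T>();
        File[] files = directory.listFiles(new FileFilter() {
            public boolean accept(File f) {
                return f.isFile() && f.getName().endsWith(".xml");
            }
        });
        if (files == null) {
            return loaded; // not a directory, or an I/O error while listing
        }
        for (File f : files) {
            try {
                loaded.add(depersister.load(f));
                LOGGER.info("Loaded '" + f.getName() + "'");
            } catch (Exception e) {
                // one bad file must not abort the whole load
                LOGGER.log(Level.WARNING, "Failed to load '" + f.getName() + "', skipping", e);
            }
        }
        return loaded;
    }

    private LenientDirectoryLoader() {}
}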
/** * A registry for factories, organized by categories (usualy by <strong>interface</strong>). For * example <code>{@linkplain org.opengis.referencing.crs.CRSFactory}.class</code> is a category, and * <code>{@linkplain org.opengis.referencing.operation.MathTransformFactory}.class</code> is an * other category. * * <p>For each category, implementations are registered in a file placed in the {@code * META-INF/services/} directory, as specified in the {@link ServiceRegistry} javadoc. Those files * are usually bundled into the JAR file distributed by the vendor. If the same {@code * META-INF/services/} file appears many time in different JARs, they are processed as if their * content were merged. * * <p>Example use: * * <blockquote> * * <code> * Set<Class<?>> categories = * Collections.singleton(new Class<?>[] {<br> * &npsp;&npsp;&npsp;&npsp;MathTransformProvider.class<br> * });<br> * FactoryRegistry registry = new FactoryRegistry(categories);<br> * <br> * // get the providers<br> * Filter filter = null;<br> * Hints hints = null;<br> * Iterator<MathTransform> providers = * registry.getServiceProviders(MathTransformProvider.class, filter, hints);<br> * </code> * * </blockquote> * * <p><strong>NOTE: This class is not thread safe</strong>. Users are responsable for * synchronisation. This is usually done in an utility class wrapping this service registry (e.g. * {@link org.geotools.referencing.ReferencingFactoryFinder}). * * @since 2.1 * @source $URL$ * @version $Id$ * @author Martin Desruisseaux * @author Richard Gould * @author Jody Garnett * @see org.geotools.referencing.ReferencingFactoryFinder * @see org.geotools.coverage.CoverageFactoryFinder */ public class FactoryRegistry extends ServiceRegistry { /** The logger for all events related to factory registry. */ protected static final Logger LOGGER = Logging.getLogger("org.geotools.factory"); /** The logger level for debug messages. */ private static final Level DEBUG_LEVEL = Level.FINEST; /** * A copy of the global configuration defined through {@link FactoryIteratorProviders} static * methods. We keep a copy in every {@code FactoryRegistry} instance in order to compare against * the master {@link FactoryIteratorProviders#GLOBAL} and detect if the configuration changed * since the last time this registry was used. * * @see #synchronizeIteratorProviders */ private final FactoryIteratorProviders globalConfiguration = new FactoryIteratorProviders(); /** * The set of category that need to be scanned for plugins, or {@code null} if none. On * initialization, all categories need to be scanned for plugins. After a category has been first * used, it is removed from this set so we don't scan for plugins again. */ private Set<Class<?>> needScanForPlugins; /** * Categories under scanning. This is used by {@link #scanForPlugins(Collection,Class)} as a guard * against infinite recursivity (i.e. when a factory to be scanned request an other dependency of * the same category). */ private final RecursionCheckingHelper scanningCategories = new RecursionCheckingHelper(); /** * Factories under testing for availability. This is used by {@link #isAvailable} as a guard * against infinite recursivity. */ private final RecursionCheckingHelper testingAvailability = new RecursionCheckingHelper(); /** * Factories under testing for hints compatibility. This is used by {@link #usesAcceptableHints} * as a guard against infinite recursivity. 
*/ private final RecursionCheckingHelper testingHints = new RecursionCheckingHelper(); /** * Constructs a new registry for the specified category. * * @param category The single category. * @since 2.4 */ @SuppressWarnings("unchecked") public FactoryRegistry(final Class<?> category) { this((Collection) Collections.singleton(category)); } /** * Constructs a new registry for the specified categories. * * @param categories The categories. * @since 2.4 */ public FactoryRegistry(final Class<?>[] categories) { this(Arrays.asList(categories)); } /** * Constructs a new registry for the specified categories. * * @param categories The categories. */ public FactoryRegistry(final Collection<Class<?>> categories) { super(categories.iterator()); for (final Iterator<Class<?>> it = getCategories(); it.hasNext(); ) { if (needScanForPlugins == null) { needScanForPlugins = new HashSet<Class<?>>(); } needScanForPlugins.add(it.next()); } } /** * Returns the providers in the registry for the specified category, filter and hints. Providers * that are not {@linkplain OptionalFactory#isAvailable available} will be ignored. This method * will {@linkplain #scanForPlugins() scan for plugins} the first time it is invoked for the given * category. * * @param <T> The class represented by the {@code category} argument. * @param category The category to look for. Usually an interface class (not the actual * implementation class). * @param filter The optional filter, or {@code null}. * @param hints The optional user requirements, or {@code null}. * @return Factories ready to use for the specified category, filter and hints. * @since 2.3 */ public synchronized <T> Iterator<T> getServiceProviders( final Class<T> category, final Filter filter, final Hints hints) { /* * The implementation of this method is very similar to the 'getUnfilteredProviders' * one except for filter handling. See the comments in 'getUnfilteredProviders' for * more implementation details. */ if (scanningCategories.contains(category)) { // Please note you will get errors here if you accidentally allow // more than one thread to use your FactoryRegistry at a time. throw new RecursiveSearchException(category); } final Filter hintsFilter = new Filter() { public boolean filter(final Object provider) { return isAcceptable(category.cast(provider), category, hints, filter); } }; synchronizeIteratorProviders(); scanForPluginsIfNeeded(category); return getServiceProviders(category, hintsFilter, true); } /** * Implementation of {@link #getServiceProviders(Class, Filter, Hints)} without the filtering * applied by the {@link #isAcceptable(Object, Class, Hints, Filter)} method. If this filtering is * not already presents in the filter given to this method, then it must be applied on the * elements returned by the iterator. The later is preferrable when: * * <p> * * <ul> * <li>There is some cheaper tests to perform before {@code isAcceptable}. * <li>We don't want a restrictive filter in order to avoid trigging a classpath scan if this * method doesn't found any element to iterate. * </ul> * * <p><b>Note:</b> {@link #synchronizeIteratorProviders} should also be invoked once before this * method. * * @todo Use Hints to match Constructor. */ final <T> Iterator<T> getUnfilteredProviders(final Class<T> category) { /* * If the map is not empty, then this mean that a scanning is under progress, i.e. * 'scanForPlugins' is currently being executed. This is okay as long as the user * is not asking for one of the categories under scanning. 
Otherwise, the answer * returned by 'getServiceProviders' would be incomplete because not all plugins * have been found yet. This can lead to some bugs hard to spot because this methoud * could complete normally but return the wrong plugin. It is safer to thrown an * exception so the user is advised that something is wrong. */ if (scanningCategories.contains(category)) { throw new RecursiveSearchException(category); } scanForPluginsIfNeeded(category); return getServiceProviders(category, true); } /** * Returns the first provider in the registry for the specified category, using the specified map * of hints (if any). This method may {@linkplain #scanForPlugins scan for plugins} the first time * it is invoked. Except as a result of this scan, no new provider instance is created by the * default implementation of this method. The {@link FactoryCreator} class change this behavior * however. * * @param <T> The class represented by the {@code category} argument. * @param category The category to look for. Must be one of the categories declared to the * constructor. Usually an interface class (not the actual implementation class). * @param filter An optional filter, or {@code null} if none. This is used for example in order to * select the first factory for some {@linkplain * org.opengis.referencing.AuthorityFactory#getAuthority authority}. * @param hints A {@linkplain Hints map of hints}, or {@code null} if none. * @param key The key to use for looking for a user-provided instance in the hints, or {@code * null} if none. * @return A factory {@linkplain OptionalFactory#isAvailable available} for use for the specified * category and hints. The returns type is {@code Object} instead of {@link Factory} because * the factory implementation doesn't need to be a Geotools one. * @throws FactoryNotFoundException if no factory was found for the specified category, filter and * hints. * @throws FactoryRegistryException if a factory can't be returned for some other reason. * @see #getServiceProviders(Class, Filter, Hints) * @see FactoryCreator#getServiceProvider */ public <T> T getServiceProvider( final Class<T> category, final Filter filter, Hints hints, final Hints.Key key) throws FactoryRegistryException { synchronizeIteratorProviders(); final boolean debug = LOGGER.isLoggable(DEBUG_LEVEL); if (debug) { /* * We are not required to insert the method name ("GetServiceProvider") in the * message because it is part of the informations already stored by LogRecord, * and formatted by the default java.util.logging.SimpleFormatter. * * Conventions for the message part according java.util.logging.Logger javadoc: * - "ENTRY" at the begining of a method. * - "RETURN" at the end of a method, if successful. * - "THROW" in case of failure. * - "CHECK" ... is our own addition to Sun's convention for this method ... */ debug("ENTRY", category, key, null, null); } Class<?> implementation = null; if (key != null) { /* * Sanity check: make sure that the key class is appropriate for the category. */ final Class<?> valueClass = key.getValueClass(); if (!category.isAssignableFrom(valueClass)) { if (debug) { debug("THROW", category, key, "unexpected type:", valueClass); } throw new IllegalArgumentException(Errors.format(ErrorKeys.ILLEGAL_KEY_$1, key)); } if (hints != null) { final Object hint = hints.get(key); if (hint != null) { if (debug) { debug("CHECK", category, key, "user provided a", hint.getClass()); } if (category.isInstance(hint)) { /* * The factory implementation was given explicitly by the user. 
* Nothing to do; we are done. */ if (debug) { debug("RETURN", category, key, "return hint as provided.", null); } return category.cast(hint); } /* * Before to pass the hints to the private 'getServiceImplementation' method, * remove the hint for the user-supplied key. This is because this hint has * been processed by this public 'getServiceProvider' method, and the policy * is to remove the processed hints before to pass them to child dependencies * (see the "Check recursively in factory dependencies" comment elswhere in * this class). * * Use case: DefaultDataSourceTest invokes indirectly 'getServiceProvider' * with a "CRS_AUTHORITY_FACTORY = ThreadedEpsgFactory.class" hint. However * ThreadedEpsgFactory (in the org.geotools.referencing.factory.epsg package) * is a wrapper around DirectEpsgFactory, and defines this dependency through * a "CRS_AUTHORITY_FACTORY = DirectEpsgFactory.class" hint. There is no way * to match this hint for both factories in same time. Since we must choose * one, we assume that the user is interrested in the most top level one and * discart this particular hint for the dependencies. */ hints = new Hints(hints); if (hints.remove(key) != hint) { // Should never happen except on concurrent modification in an other thread. throw new AssertionError(key); } /* * If the user accepts many implementation classes, then try all of them in * the preference order given by the user. The last class (or the singleton * if the hint was not an array) will be tried using the "normal" path * (oustide the loop) in order to get the error message in case of failure. */ if (hint instanceof Class<?>[]) { final Class<?>[] types = (Class<?>[]) hint; final int length = types.length; for (int i = 0; i < length - 1; i++) { final Class<?> type = types[i]; if (debug) { debug("CHECK", category, key, "consider hint[" + i + ']', type); } final T candidate = getServiceImplementation(category, type, filter, hints); if (candidate != null) { if (debug) { debug("RETURN", category, key, "found implementation", candidate.getClass()); } return candidate; } } if (length != 0) { implementation = types[length - 1]; // Last try to be done below. } } else { implementation = (Class<?>) hint; } } } } if (debug && implementation != null) { debug("CHECK", category, key, "consider hint[last]", implementation); } final T candidate = getServiceImplementation(category, implementation, filter, hints); if (candidate != null) { if (debug) { debug("RETURN", category, key, "found implementation", candidate.getClass()); } return candidate; } if (debug) { debug("THROW", category, key, "could not find implementation.", null); } throw new FactoryNotFoundException( Errors.format( ErrorKeys.FACTORY_NOT_FOUND_$1, implementation != null ? implementation : category)); } /** * Logs a debug message for {@link #getServiceProvider} method. Note: we are not required to * insert the method name ({@code "GetServiceProvider"}) in the message because it is part of the * informations already stored by {@link LogRecord}, and formatted by the default {@link * java.util.logging.SimpleFormatter}. * * @param status {@code "ENTRY"}, {@code "RETURN"} or {@code "THROW"}, according {@link Logger} * conventions. * @param category The category given to the {@link #getServiceProvider} method. * @param key The key being examined, or {@code null}. * @param message Optional message, or {@code null} if none. * @param type Optional class to format after the message, or {@code null}. 
*/ private static void debug( final String status, final Class<?> category, final Hints.Key key, final String message, final Class type) { final StringBuilder buffer = new StringBuilder(status); buffer .append(Utilities.spaces(Math.max(1, 7 - status.length()))) .append('(') .append(Classes.getShortName(category)); if (key != null) { buffer.append(", ").append(key); } buffer.append(')'); if (message != null) { buffer.append(": ").append(message); } if (type != null) { buffer.append(' ').append(Classes.getShortName(type)).append('.'); } final LogRecord record = new LogRecord(DEBUG_LEVEL, buffer.toString()); record.setSourceClassName(FactoryRegistry.class.getName()); record.setSourceMethodName("getServiceProvider"); record.setLoggerName(LOGGER.getName()); LOGGER.log(record); } /** * Searchs the first implementation in the registery matching the specified conditions. This * method is invoked only by the {@link #getServiceProvider(Class, Filter, Hints, Hints.Key)} * public method above; there is no recursivity there. This method do not creates new instance if * no matching factory is found. * * @param category The category to look for. Usually an interface class. * @param implementation The desired class for the implementation, or {@code null} if none. * @param filter An optional filter, or {@code null} if none. * @param hints A {@linkplain Hints map of hints}, or {@code null} if none. * @return A factory for the specified category and hints, or {@code null} if none. */ private <T> T getServiceImplementation( final Class<T> category, final Class<?> implementation, final Filter filter, final Hints hints) { for (final Iterator<T> it = getUnfilteredProviders(category); it.hasNext(); ) { final T candidate = it.next(); // Implementation class must be tested before 'isAcceptable' // in order to avoid StackOverflowError in some situations. if (implementation != null && !implementation.isInstance(candidate)) { continue; } if (!isAcceptable(candidate, category, hints, filter)) { continue; } return candidate; } final List<Reference<T>> cached = getCachedProviders(category); if (cached != null) { /* * Checks if a factory previously created by FactoryCreator could fit. This * block should never be executed if this instance is not a FactoryCreator. */ for (final Iterator<Reference<T>> it = cached.iterator(); it.hasNext(); ) { final T candidate = it.next().get(); if (candidate == null) { it.remove(); continue; } if (implementation != null && !implementation.isInstance(candidate)) { continue; } if (!isAcceptable(candidate, category, hints, filter)) { continue; } return candidate; } } return null; } /** * Returns the providers available in the cache, or {@code null} if none. To be overridden by * {@link FactoryCreator} only. */ <T> List<Reference<T>> getCachedProviders(final Class<T> category) { return null; } /** * Returns {@code true} is the specified {@code factory} meets the requirements specified by a map * of {@code hints} and the filter. This method is the entry point for the following public * methods: * * <ul> * <li>Singleton {@link #getServiceProvider (Class category, Filter, Hints, Hints.Key)} * <li>Iterator {@link #getServiceProviders(Class category, Filter, Hints)} * </ul> * * @param candidate The factory to checks. * @param category The factory category. Usually an interface. * @param hints The optional user requirements, or {@code null}. * @param filter The optional filter, or {@code null}. * @return {@code true} if the {@code factory} meets the user requirements. 
*/ final <T> boolean isAcceptable( final T candidate, final Class<T> category, final Hints hints, final Filter filter) { if (filter != null && !filter.filter(candidate)) { return false; } /* * Note: isAvailable(...) must be tested before checking the hints, because in current * Geotools implementation (especially DeferredAuthorityFactory), some hints computation * are deferred until a connection to the database is etablished (which 'isAvailable' * does in order to test the connection). */ if (!isAvailable(candidate)) { return false; } if (hints != null) { if (candidate instanceof Factory) { if (!usesAcceptableHints((Factory) candidate, category, hints, (Set<Factory>) null)) { return false; } } } /* * Checks for optional user conditions supplied in FactoryRegistry subclasses. */ return isAcceptable(candidate, category, hints); } /** * Returns {@code true} is the specified {@code factory} meets the requirements specified by a map * of {@code hints}. This method checks only the hints; it doesn't check the {@link Filter}, the * {@linkplain OptionalFactory#isAvailable availability} or the user-overrideable {@link * #isAcceptable(Object, Class, Hints)} method. This method invokes itself recursively. * * @param factory The factory to checks. * @param category The factory category. Usually an interface. * @param hints The user requirements ({@code null} not allowed). * @param alreadyDone Should be {@code null} except on recursive calls (for internal use only). * @return {@code true} if the {@code factory} meets the hints requirements. */ private boolean usesAcceptableHints( final Factory factory, final Class<?> category, final Hints hints, Set<Factory> alreadyDone) { /* * Ask for implementation hints with special care against infinite recursivity. * Some implementations use deferred algorithms fetching dependencies only when * first needed. The call to getImplementationHints() is sometime a trigger for * fetching dependencies (in order to return accurate hints). For example the * BufferedCoordinateOperationFactory implementation asks for an other instance * of CoordinateOperationFactory, the instance to cache behind a buffer, which * should not be itself. Of course BufferedCoordinateOperation will checks that * it is not caching itself, but its test happen too late for preventing a never- * ending loop if we don't put a 'testingHints' guard here. It is also a safety * against broken factory implementations. */ if (!testingHints.addAndCheck(factory)) { return false; } final Map<RenderingHints.Key, ?> implementationHints; try { implementationHints = Hints.stripNonKeys(factory.getImplementationHints()); } finally { testingHints.removeAndCheck(factory); } if (implementationHints == null) { // factory was bad and did not meet contract - assume it used no Hints return true; } /* * We got the implementation hints. Now tests their compatibility. */ Hints remaining = null; for (final Map.Entry<?, ?> entry : implementationHints.entrySet()) { final Object key = entry.getKey(); final Object value = entry.getValue(); final Object expected = hints.get(key); if (expected != null) { /* * We have found a hint that matter. Check if the * available factory meets the user's criterions. 
*/ if (expected instanceof Class<?>) { if (!((Class<?>) expected).isInstance(value)) { return false; } } else if (expected instanceof Class<?>[]) { final Class<?>[] types = (Class<?>[]) expected; int i = 0; do if (i >= types.length) return false; while (!types[i++].isInstance(value)); } else if (!expected.equals(value)) { return false; } } /* * Checks recursively in factory dependencies, if any. Note that the dependencies * will be checked against a subset of user's hints. More specifically, all hints * processed by the current pass will NOT be passed to the factories dependencies. * This is because the same hint may appears in the "parent" factory and a "child" * dependency with different value. For example the FORCE_LONGITUDE_FIRST_AXIS_ORDER * hint has the value TRUE in OrderedAxisAuthorityFactory, but the later is basically * a wrapper around the ThreadedEpsgFactory (typically), which has the value FALSE * for the same hint. * * Additional note: The 'alreadyDone' set is a safety against cyclic dependencies, * in order to protect ourself against never-ending loops. This is not the same * kind of dependencies than 'testingHints'. It is a "factory A depends on factory * B which depends on factory A" loop, which is legal. */ if (value instanceof Factory) { final Factory dependency = (Factory) value; if (alreadyDone == null) { alreadyDone = new HashSet<Factory>(); } if (!alreadyDone.contains(dependency)) { alreadyDone.add(factory); if (remaining == null) { remaining = new Hints(hints); remaining.keySet().removeAll(implementationHints.keySet()); } final Class<?> type; if (key instanceof Hints.Key) { type = ((Hints.Key) key).getValueClass(); } else { type = Factory.class; // Kind of unknown factory type... } // Recursive call to this method for scanning dependencies. if (!usesAcceptableHints(dependency, type, remaining, alreadyDone)) { return false; } } } } return true; } /** * Returns {@code true} if the specified {@code provider} meets the requirements specified by a * map of {@code hints}. The default implementation always returns {@code true}. There is no need * to override this method for {@link AbstractFactory} implementations, since their hints are * automatically checked. Override this method for non-Geotools implementations. For example a JTS * geometry factory finder may overrides this method in order to check if a {@link * com.vividsolutions.jts.geom.GeometryFactory} uses the required {@link * com.vividsolutions.jts.geom.CoordinateSequenceFactory}. Such method should be implemented as * below, since this method may be invoked for various kind of objects: * * <blockquote> * * <pre> * if (provider instanceof GeometryFactory) { * // ... Check the GeometryFactory state here. * } * </pre> * * </blockquote> * * @param <T> The class represented by the {@code category} argument. * @param provider The provider to checks. * @param category The factory category. Usually an interface. * @param hints The user requirements, or {@code null} if none. * @return {@code true} if the {@code provider} meets the user requirements. */ protected <T> boolean isAcceptable(final T provider, final Class<T> category, final Hints hints) { return true; } /** Returns {@code true} if the specified factory is available. */ private boolean isAvailable(final Object provider) { if (!(provider instanceof OptionalFactory)) { return true; } final OptionalFactory factory = (OptionalFactory) provider; final Class<? 
extends OptionalFactory> type = factory.getClass(); if (!testingAvailability.addAndCheck(type)) { throw new RecursiveSearchException(type); } try { return factory.isAvailable(); } finally { testingAvailability.removeAndCheck(type); } } /** * Returns all class loaders to be used for scanning plugins. Current implementation returns the * following class loaders: * * <p> * * <ul> * <li>{@linkplain Class#getClassLoader This object class loader} * <li>{@linkplain Thread#getContextClassLoader The thread context class loader} * <li>{@linkplain ClassLoader#getSystemClassLoader The system class loader} * </ul> * * The actual number of class loaders may be smaller if redundancies was found. If some more * classloaders should be scanned, they shall be added into the code of this method. * * @return All classloaders to be used for scanning plugins. */ public final Set<ClassLoader> getClassLoaders() { final Set<ClassLoader> loaders = new HashSet<ClassLoader>(); for (int i = 0; i < 4; i++) { final ClassLoader loader; try { switch (i) { case 0: loader = getClass().getClassLoader(); break; case 1: loader = FactoryRegistry.class.getClassLoader(); break; case 2: loader = Thread.currentThread().getContextClassLoader(); break; case 3: loader = ClassLoader.getSystemClassLoader(); break; // Add any supplementary class loaders here, if needed. default: throw new AssertionError(i); // Should never happen. } } catch (SecurityException exception) { // We are not allowed to get a class loader. // Continue; some other class loader may be available. continue; } loaders.add(loader); } loaders.remove(null); loaders.addAll(GeoTools.getClassLoaders()); /* * We now have a set of class loaders with duplicated object already removed * (e.g. system classloader == context classloader). However, we may still * have an other form of redundancie. A class loader may be the parent of an * other one. Try to remove those dependencies. */ final ClassLoader[] asArray = loaders.toArray(new ClassLoader[loaders.size()]); for (int i = 0; i < asArray.length; i++) { ClassLoader loader = asArray[i]; try { while ((loader = loader.getParent()) != null) { loaders.remove(loader); } } catch (SecurityException exception) { // We are not allowed to fetch the parent class loader. // Ignore (some redundancies may remains). } } if (loaders.isEmpty()) { LOGGER.warning("No class loaders available."); } return loaders; } /** * Scans for factory plug-ins on the application class path. This method is needed because the * application class path can theoretically change, or additional plug-ins may become available. * Rather than re-scanning the classpath on every invocation of the API, the class path is scanned * automatically only on the first invocation. Clients can call this method to prompt a re-scan. * Thus this method need only be invoked by sophisticated applications which dynamically make new * plug-ins available at runtime. */ public void scanForPlugins() { final Set<ClassLoader> loaders = getClassLoaders(); for (final Iterator<Class<?>> categories = getCategories(); categories.hasNext(); ) { final Class<?> category = categories.next(); scanForPlugins(loaders, category); } } /** * Scans for factory plug-ins of the given category, with guard against recursivities. The * recursivity check make debugging easier than inspecting a {@link StackOverflowError}. * * @param loader The class loader to use. * @param category The category to scan for plug-ins. 
*/ private <T> void scanForPlugins(final Collection<ClassLoader> loaders, final Class<T> category) { if (!scanningCategories.addAndCheck(category)) { throw new RecursiveSearchException(category); } try { final StringBuilder message = getLogHeader(category); boolean newServices = false; /* * First, scan META-INF/services directories (the default mechanism). */ for (final ClassLoader loader : loaders) { newServices |= register(lookupProviders(category, loader), category, message); newServices |= registerFromSystemProperty(loader, category, message); } /* * Next, query the user-provider iterators, if any. */ final FactoryIteratorProvider[] fip = FactoryIteratorProviders.getIteratorProviders(); for (int i = 0; i < fip.length; i++) { final Iterator<T> it = fip[i].iterator(category); if (it != null) { newServices |= register(it, category, message); } } /* * Finally, log the list of registered factories. */ if (newServices) { log("scanForPlugins", message); } } finally { scanningCategories.removeAndCheck(category); } } /** * Scans the given category for plugins only if needed. After this method has been invoked once * for a given category, it will no longer scan for that category. */ private <T> void scanForPluginsIfNeeded(final Class<?> category) { if (needScanForPlugins != null && needScanForPlugins.remove(category)) { if (needScanForPlugins.isEmpty()) { needScanForPlugins = null; } scanForPlugins(getClassLoaders(), category); } } /** * {@linkplain #registerServiceProvider Registers} all factories given by the supplied iterator. * * @param factories The factories (or "service providers") to register. * @param category the category under which to register the providers. * @param message A buffer where to write the logging message. * @return {@code true} if at least one factory has been registered. */ private <T> boolean register( final Iterator<T> factories, final Class<T> category, final StringBuilder message) { boolean newServices = false; final String lineSeparator = System.getProperty("line.separator", "\n"); while (factories.hasNext()) { T factory; try { factory = factories.next(); } catch (OutOfMemoryError error) { // Makes sure that we don't try to handle this error. throw error; } catch (NoClassDefFoundError error) { /* * A provider can't be registered because of some missing dependencies. * This occurs for example when trying to register the WarpTransform2D * math transform on a machine without JAI installation. Since the factory * may not be essential (this is the case of WarpTransform2D), just skip it. */ loadingFailure(category, error, false); continue; } catch (ExceptionInInitializerError error) { /* * If an exception occured during class initialization, log the cause. * The ExceptionInInitializerError alone doesn't help enough. */ final Throwable cause = error.getCause(); if (cause != null) { loadingFailure(category, cause, true); } throw error; } catch (Error error) { if (!Classes.getShortClassName(error).equals("ServiceConfigurationError")) { // We want to handle sun.misc.ServiceConfigurationError only. Unfortunatly, we // need to rely on reflection because this error class is not a commited API. // TODO: Check if the error is catchable with JSE 6. throw error; } /* * Failed to register a factory for a reason probably related to the plugin * initialisation. It may be some factory-dependent missing resources. */ loadingFailure(category, error, true); continue; } if (category.isAssignableFrom(factory.getClass())) { final Class<? 
extends T> factoryClass = factory.getClass().asSubclass(category); /* * If the factory implements more than one interface and an * instance were already registered, reuse the same instance * instead of duplicating it. */ final T replacement = getServiceProviderByClass(factoryClass); if (replacement != null) { factory = replacement; // Need to register anyway, because the category may not be // the same. } if (registerServiceProvider(factory, category)) { /* * The factory is now registered. Add it to the message to * be logged. We will log all factories together in a single * log event because some registration (e.g. * MathTransformProviders) would be otherwise quite verbose. */ message.append(lineSeparator); message.append(" "); message.append(factoryClass.getName()); newServices = true; } } } return newServices; } /** * If a system property was setup, load the class (if not already registered) and move it in front * of any other factory. This is done for compatibility with legacy {@code FactoryFinder} * implementation. * * @param loader The class loader to use. * @param category The category to scan for plug-ins. * @param message A buffer where to write the logging message. * @return {@code true} if at least one factory has been registered. */ private <T> boolean registerFromSystemProperty( final ClassLoader loader, final Class<T> category, final StringBuilder message) { boolean newServices = false; try { final String classname = System.getProperty(category.getName()); if (classname != null) try { final Class<?> candidate = loader.loadClass(classname); if (category.isAssignableFrom(candidate)) { final Class<? extends T> factoryClass = candidate.asSubclass(category); T factory = getServiceProviderByClass(factoryClass); if (factory == null) try { factory = factoryClass.newInstance(); if (registerServiceProvider(factory, category)) { message.append(System.getProperty("line.separator", "\n")); message.append(" "); message.append(factoryClass.getName()); newServices = true; } } catch (IllegalAccessException exception) { throw new FactoryRegistryException( Errors.format(ErrorKeys.CANT_CREATE_FACTORY_$1, classname), exception); } catch (InstantiationException exception) { throw new FactoryRegistryException( Errors.format(ErrorKeys.CANT_CREATE_FACTORY_$1, classname), exception); } /* * Put this factory in front of every other factories (including the ones loaded * in previous class loaders, which is why we don't inline this ordering in the * 'register' loop). Note: if some factories were not yet registered, they will * not be properly ordered. Since this code exists more for compatibility reasons * than as a commited API, we ignore this short comming for now. */ for (final Iterator<T> it = getServiceProviders(category, false); it.hasNext(); ) { final T other = it.next(); if (other != factory) { setOrdering(category, factory, other); } } } } catch (ClassNotFoundException exception) { // The class has not been found, maybe because we are not using the appropriate // class loader. Ignore (do not thrown an exception), in order to give a chance // to the caller to invokes this method again with a different class loader. } } catch (SecurityException exception) { // We are not allowed to read property, probably // because we are running in an applet. Ignore... } return newServices; } /** Invoked when a factory can't be loaded. Log a warning, but do not stop the process. 
*/ private static void loadingFailure( final Class<?> category, final Throwable error, final boolean showStackTrace) { final String name = Classes.getShortName(category); final StringBuilder cause = new StringBuilder(Classes.getShortClassName(error)); final String message = error.getLocalizedMessage(); if (message != null) { cause.append(": "); cause.append(message); } final LogRecord record = Loggings.format(Level.WARNING, LoggingKeys.CANT_LOAD_SERVICE_$2, name, cause.toString()); if (showStackTrace) { record.setThrown(error); } record.setSourceClassName(FactoryRegistry.class.getName()); record.setSourceMethodName("scanForPlugins"); record.setLoggerName(LOGGER.getName()); LOGGER.log(record); } /** Prepares a message to be logged if any provider has been registered. */ private static StringBuilder getLogHeader(final Class<?> category) { return new StringBuilder( Loggings.getResources(null).getString(LoggingKeys.FACTORY_IMPLEMENTATIONS_$1, category)); } /** Log the specified message after all provider for a given category have been registered. */ private static void log(final String method, final StringBuilder message) { final LogRecord record = new LogRecord(Level.CONFIG, message.toString()); record.setSourceClassName(FactoryRegistry.class.getName()); record.setSourceMethodName(method); record.setLoggerName(LOGGER.getName()); LOGGER.log(record); } /** * Synchronizes the content of the {@link #globalConfiguration} with {@link * FactoryIteratorProviders#GLOBAL}. New providers are {@linkplain #register registered} * immediately. Note that this method is typically invoked in a different thread than {@link * FactoryIteratorProviders} method calls. * * @see FactoryIteratorProviders#addFactoryIteratorProvider */ private void synchronizeIteratorProviders() { final FactoryIteratorProvider[] newProviders = globalConfiguration.synchronizeIteratorProviders(); if (newProviders == null) { return; } for (final Iterator<Class<?>> categories = getCategories(); categories.hasNext(); ) { final Class<?> category = categories.next(); if (needScanForPlugins == null || !needScanForPlugins.contains(category)) { /* * Register immediately the factories only if some other factories were already * registered for this category, because in such case scanForPlugin() will not * be invoked automatically. If no factory are registered for this category, do * nothing - we will rely on the lazy invocation of scanForPlugins() when first * needed. We perform this check because getServiceProviders(category).hasNext() * is the criterion used by FactoryRegistry in order to decide if it should invoke * automatically scanForPlugins(). */ for (int i = 0; i < newProviders.length; i++) { register(newProviders[i], category); } } } } /** Registers every factories from the specified provider for the given category. */ private <T> void register(final FactoryIteratorProvider provider, final Class<T> category) { final Iterator<T> it = provider.iterator(category); if (it != null) { final StringBuilder message = getLogHeader(category); if (register(it, category, message)) { log("synchronizeIteratorProviders", message); } } } /** * Set pairwise ordering between all factories according a comparator. 
Calls to <code> * {@linkplain Comparator#compare compare}(factory1, factory2)</code> should returns: * * <ul> * <li>{@code -1} if {@code factory1} is preferred to {@code factory2} * <li>{@code +1} if {@code factory2} is preferred to {@code factory1} * <li>{@code 0} if there is no preferred order between {@code factory1} and {@code factory2} * </ul> * * @param <T> The class represented by the {@code category} argument. * @param category The category to set ordering. * @param comparator The comparator to use for ordering. * @return {@code true} if at least one ordering setting has been modified as a consequence of * this call. */ public <T> boolean setOrdering(final Class<T> category, final Comparator<T> comparator) { boolean set = false; final List<T> previous = new ArrayList<T>(); for (final Iterator<T> it = getServiceProviders(category, false); it.hasNext(); ) { final T f1 = it.next(); for (int i = previous.size(); --i >= 0; ) { final T f2 = previous.get(i); final int c; try { c = comparator.compare(f1, f2); } catch (ClassCastException exception) { /* * This exception is expected if the user-supplied comparator follows strictly * the java.util.Comparator specification and has determined that it can't * compare the supplied factories. From ServiceRegistry point of view, it just * means that the ordering between those factories will stay undeterminated. */ continue; } if (c > 0) { set |= setOrdering(category, f1, f2); } else if (c < 0) { set |= setOrdering(category, f2, f1); } } previous.add(f1); } return set; } /** * Sets or unsets a pairwise ordering between all factories meeting a criterion. For example in * the CRS framework ({@link org.geotools.referencing.FactoryFinder}), this is used for setting * ordering between all factories provided by two vendors, or for two authorities. If one or both * factories are not currently registered, or if the desired ordering is already set/unset, * nothing happens and false is returned. * * @param <T> The class represented by the {@code base} argument. * @param base The base category. Only categories {@linkplain Class#isAssignableFrom assignable} * to {@code base} will be processed. * @param set {@code true} for setting the ordering, or {@code false} for unsetting. * @param service1 Filter for the preferred factory. * @param service2 Filter for the factory to which {@code service1} is preferred. * @return {@code true} if the ordering changed as a result of this call. */ public <T> boolean setOrdering( final Class<T> base, final boolean set, final Filter service1, final Filter service2) { boolean done = false; for (final Iterator<Class<?>> categories = getCategories(); categories.hasNext(); ) { final Class<?> candidate = categories.next(); if (base.isAssignableFrom(candidate)) { final Class<? extends T> category = candidate.asSubclass(base); done |= setOrUnsetOrdering(category, set, service1, service2); } } return done; } /** Helper method for the above. */ private <T> boolean setOrUnsetOrdering( final Class<T> category, final boolean set, final Filter service1, final Filter service2) { boolean done = false; T impl1 = null; T impl2 = null; for (final Iterator<? extends T> it = getServiceProviders(category, false); it.hasNext(); ) { final T factory = it.next(); if (service1.filter(factory)) impl1 = factory; if (service2.filter(factory)) impl2 = factory; if (impl1 != null && impl2 != null && impl1 != impl2) { if (set) done |= setOrdering(category, impl1, impl2); else done |= unsetOrdering(category, impl1, impl2); } } return done; } }
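// The following is a minimal, illustrative sketch (not part of the registry source above) of the
// comparator contract documented for setOrdering(Class, Comparator): returning -1 prefers the first
// factory, +1 prefers the second, 0 leaves the pair unordered. The WidgetFactory interface, its
// getVendor() method and the registry variable in the trailing comment are hypothetical names used
// only for this example.
interface WidgetFactory {
    String getVendor();
}

class PreferVendorOrdering {
    /** Builds a comparator that ranks factories from the given vendor ahead of all others. */
    static java.util.Comparator<WidgetFactory> preferVendor(final String vendor) {
        return new java.util.Comparator<WidgetFactory>() {
            public int compare(WidgetFactory f1, WidgetFactory f2) {
                boolean first = vendor.equals(f1.getVendor());
                boolean second = vendor.equals(f2.getVendor());
                if (first == second) {
                    return 0; // no preference between the two factories
                }
                return first ? -1 : +1; // -1 means f1 is preferred, as documented above
            }
        };
    }
}
// Assumed usage: registry.setOrdering(WidgetFactory.class, PreferVendorOrdering.preferVendor("Acme"));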
/** * Returns the backing-store factory for HSQL syntax. If the cached tables are not available, they * will be created now from the SQL scripts bundled in this plugin. * * @param hints A map of hints, including the low-level factories to use for CRS creation. * @return The EPSG factory using HSQL syntax. * @throws SQLException if connection to the database failed. */ protected AbstractAuthorityFactory createBackingStore(final Hints hints) throws SQLException { final DataSource source = getDataSource(); final File directory = getDirectory(source); directory.mkdirs(); if (!dataExists(directory)) { FileLock lock = null; try { // get an exclusive lock lock = acquireLock(directory); // if after getting the lock the database is still incomplete let's work on it if (!dataExists(directory)) { /* * HSQL has created automatically an empty database. We need to populate it. * Executes the SQL scripts bundled in the JAR. In theory, each line contains * a full SQL statement. For this plugin however, we have compressed "INSERT * INTO" statements using Compactor class in this package. */ final Logger logger = Logging.getLogger(LOGGER); final LogRecord record = Loggings.format(Level.INFO, LoggingKeys.CREATING_CACHED_EPSG_DATABASE_$1, VERSION); record.setLoggerName(logger.getName()); logger.log(record); ZipInputStream zin = new ZipInputStream(ThreadedH2EpsgFactory.class.getResourceAsStream(ZIP_FILE)); ZipEntry ze = null; byte[] buf = new byte[1024]; int read = 0; while ((ze = zin.getNextEntry()) != null) { File file = new File(directory, ze.getName()); if (file.exists()) { file.delete(); } FileOutputStream fout = new FileOutputStream(file); while ((read = zin.read(buf)) > 0) { fout.write(buf, 0, read); } zin.closeEntry(); fout.close(); } zin.close(); // mark the successful creation File marker = new File(directory, MARKER_FILE); if (marker.exists()) { marker.delete(); } marker.createNewFile(); setReadOnly(directory); } } catch (IOException exception) { SQLException e = new SQLException(Errors.format(ErrorKeys.CANT_READ_$1, ZIP_FILE)); e.initCause(exception); // TODO: inline cause when we will be allowed to target Java 6. throw e; } finally { if (lock != null) { try { lock.release(); lock.channel().close(); new File(directory, LOCK_FILE).delete(); } catch (IOException e) { // does not matter, was just cleanup } } } } FactoryUsingAnsiSQL factory = new FactoryUsingAnsiSQL(hints, getDataSource().getConnection()); factory.setValidationQuery("CALL NOW()"); return factory; }
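// A hedged, self-contained sketch of the "lock, re-check, populate, mark" pattern used by
// createBackingStore(...) above. The plugin's acquireLock/dataExists helpers are not shown in this
// snippet, so plain java.io/java.nio.channels file locking is used instead; the cache.lock and
// cache.complete file names are invented for the example (imports omitted, as in the surrounding
// snippets).
class CachedDatabaseInitializer {
    static void initialize(File directory) throws IOException {
        File marker = new File(directory, "cache.complete"); // assumed marker file name
        if (marker.exists()) {
            return; // another process already populated the cache
        }
        RandomAccessFile lockFile = new RandomAccessFile(new File(directory, "cache.lock"), "rw");
        FileLock lock = lockFile.getChannel().lock(); // exclusive, inter-process lock
        try {
            if (!marker.exists()) { // re-check once the lock is held
                // ... populate the cache here, e.g. unzip the bundled SQL scripts ...
                marker.createNewFile(); // record the successful creation
            }
        } finally {
            lock.release();
            lockFile.close();
        }
    }
}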
/** * ArcSDEDAtaStore test cases * * @author Gabriel Roldan, Axios Engineering * @source $URL: * http://svn.geotools.org/geotools/trunk/gt/modules/plugin/arcsde/datastore/src/test/java * /org/geotools/arcsde/data/ArcSDEDataStoreTest.java $ * @version $Id$ */ public class ArcSDEDataStoreTest { /** package logger */ private static Logger LOGGER = org.geotools.util.logging.Logging.getLogger(ArcSDEDataStoreTest.class.getPackage().getName()); /** DOCUMENT ME! */ private static TestData testData; /** an ArcSDEDataStore created on setUp() to run tests against */ private DataStore store; /** a filter factory for testing */ FilterFactory ff = CommonFactoryFinder.getFilterFactory(null); @BeforeClass public static void oneTimeSetUp() throws Exception { testData = new TestData(); testData.setUp(); final boolean insertTestData = true; testData.createTempTable(insertTestData); } @AfterClass public static void oneTimeTearDown() { boolean cleanTestTable = false; boolean cleanPool = true; testData.tearDown(cleanTestTable, cleanPool); } /** * loads {@code testData/testparams.properties} into a Properties object, wich is used to obtain * test tables names and is used as parameter to find the DataStore * * @throws Exception DOCUMENT ME! */ @Before public void setUp() throws Exception { if (testData == null) { oneTimeSetUp(); } this.store = testData.getDataStore(); } @After public void tearDown() throws Exception { this.store = null; } @Test public void testDataStoreFinderFindsIt() throws IOException { DataStore sdeDs = null; DataStoreFinder.scanForPlugins(); sdeDs = DataStoreFinder.getDataStore(testData.getConProps()); assertNotNull(sdeDs); String failMsg = sdeDs + " is not an ArcSDEDataStore"; assertTrue(failMsg, (sdeDs instanceof ArcSDEDataStore)); LOGGER.fine("testFinder OK :" + sdeDs.getClass().getName()); } @Test public void testDataAccessFinderFindsIt() throws IOException { Map<String, Serializable> params = new HashMap<String, Serializable>(); params.putAll(testData.getConProps()); DataAccess<? extends FeatureType, ? extends Feature> dataStore; dataStore = DataAccessFinder.getDataStore(params); assertNotNull(dataStore); String failMsg = dataStore + " is not an ArcSDEDataStore"; assertTrue(failMsg, dataStore instanceof ArcSDEDataStore); } @Test public void testGetInfo() { ServiceInfo info = store.getInfo(); assertNotNull(info); assertNotNull(info.getTitle()); assertNotNull(info.getDescription()); assertNotNull(info.getSchema()); } @Test public void testGet() { ServiceInfo info = store.getInfo(); assertNotNull(info); assertNotNull(info.getTitle()); assertNotNull(info.getDescription()); assertNotNull(info.getSchema()); } /** * This test is currently broken. It's a placeholder for some logic that sfarber wrote which tries * to guess the SRS of a featureclass, based on connecting to it via an SeLayer. * * @throws Throwable */ @Test @Ignore public void testAutoFillSRS() throws Throwable { ArcSDEDataStore ds = testData.getDataStore(); CoordinateReferenceSystem sdeCRS = ds.getSchema("GISDATA.TOWNS_POLY").getGeometryDescriptor().getCoordinateReferenceSystem(); LOGGER.info(sdeCRS.toWKT().replaceAll(" ", "").replaceAll("\n", "").replaceAll("\"", "\\\"")); // CoordinateReferenceSystem epsgCRS = CRS.decode("EPSG:26986"); // LOGGER.info("are these two CRS's equal? 
" + // CRS.equalsIgnoreMetadata(sdeCRS, epsgCRS)); if (1 == 1) return; int epsgCode = -1; int[] projcs = PeFactory.projcsCodelist(); LOGGER.info(projcs.length + " projections available."); for (int i = 0; i < projcs.length; i++) { try { PeProjectedCS candidate = PeFactory.projcs(projcs[i]); // in ArcSDE 9.2, if the PeFactory doesn't support a projection // it claimed // to support, it returns 'null'. So check for it. if (candidate != null && candidate.getName().indexOf("Massachusetts") != -1) { // LOGGER.info("\n\n" + projcs[i] + " has name " + // candidate.getName() + "\ntried to match " + wktName + // "\n\n"); epsgCode = projcs[i]; } else if (candidate == null) { // LOGGER.info(projcs[i] + " was null"); } else if (candidate != null) { // LOGGER.info(projcs[i] + " wasn't null"); } } catch (PeProjectionException pe) { // Strangely SDE includes codes in the projcsCodeList() that // it doesn't actually support. // Catch the exception and skip them here. } } } @Test public void testDispose() throws IOException { store.dispose(); try { ((ArcSDEDataStore) store).getSession(Transaction.AUTO_COMMIT); fail("Expected IllegalStateException when the datastore has been disposed"); } catch (IllegalStateException e) { assertTrue(true); } finally { // dispose test data so next test does not fail due to pool being // closed testData.tearDown(false, true); testData = null; } } /** * test that a ArcSDEDataStore that connects to de configured test database contains the tables * defined by the parameters "point_table", "line_table" and "polygon_table", wether ot not * they're defined as single table names or as full qualified sde table names (i.e. * SDE.SDE.TEST_POINT) * * @throws IOException * @throws SeException */ @Test public void testGetTypeNames() throws IOException, SeException { String[] featureTypes = store.getTypeNames(); assertNotNull(featureTypes); // if (LOGGER.isLoggable(Level.FINE)) { // for (int i = 0; i < featureTypes.length; i++) // System.out.println(featureTypes[i]); // } testTypeExists(featureTypes, testData.getTempTableName()); } /** * tests that the schema for the defined tests tables are returned. * * @throws IOException DOCUMENT ME! * @throws SeException */ @Test public void testGetSchema() throws IOException, SeException { SimpleFeatureType schema; schema = store.getSchema(testData.getTempTableName()); assertNotNull(schema); // ROW_ID is not included in TEST_TABLE_COLS assertEquals(TEST_TABLE_COLS.length, schema.getAttributeCount()); for (int i = 0; i < TEST_TABLE_COLS.length; i++) { assertEquals("at index" + i, TEST_TABLE_COLS[i], schema.getDescriptor(i).getLocalName()); } assertFalse(schema.getDescriptor(0).isNillable()); assertTrue(schema.getDescriptor(1).isNillable()); } /** * Tests the creation of new feature types, with CRS and all. * * <p>This test also ensures that the arcsde datastore is able of creating schemas where the * geometry attribute is not the last one. This is important since to do so, the ArcSDE datastore * must break the usual way of creating schemas with the ArcSDE Java API, in which one first * creates the (non spatially enabled) "table" with all the non spatial attributes and finally * creates the "layer", adding the spatial attribute to the previously created table. So, this * test ensures the datastore correctly works arround this limitation. 
* * @throws IOException * @throws SchemaException * @throws SeException * @throws UnavailableConnectionException */ @Test public void testCreateSchema() throws IOException, SchemaException, SeException, UnavailableConnectionException { final String typeName; { ISessionPool connectionPool = testData.getConnectionPool(); ISession session = connectionPool.getSession(); final String user; user = session.getUser(); session.dispose(); typeName = user + ".GT_TEST_CREATE"; } SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); b.setName(typeName); b.add("FST_COL", String.class); b.add("SECOND_COL", String.class); b.add("GEOM", Point.class); b.add("FOURTH_COL", Integer.class); final SimpleFeatureType type = b.buildFeatureType(); DataStore ds = testData.getDataStore(); testData.deleteTable(typeName); Map hints = new HashMap(); hints.put("configuration.keyword", testData.getConfigKeyword()); ((ArcSDEDataStore) ds).createSchema(type, hints); testData.deleteTable(typeName); } @SuppressWarnings("unchecked") @Test public void testCreateNillableShapeSchema() throws IOException, SchemaException, SeException, UnavailableConnectionException { SimpleFeatureType type; final String typeName = "GT_TEST_CREATE"; SimpleFeatureTypeBuilder b = new SimpleFeatureTypeBuilder(); b.setName(typeName); b.add("OBJECTID", Integer.class); b.nillable(true); b.add("SHAPE", MultiLineString.class); type = b.buildFeatureType(); ArcSDEDataStore ds = testData.getDataStore(); testData.deleteTable(typeName); Map hints = new HashMap(); hints.put("configuration.keyword", testData.getConfigKeyword()); ds.createSchema(type, hints); testData.deleteTable(typeName); } // ///////////////// HELPER FUNCTIONS //////////////////////// /** * Checks for the existence of <code>table</code> in <code>featureTypes</code>. <code>table</code> * must be a fully qualified sde feature type name (i.e. "TEST_POINT" == "SDE.SDE.TEST_POINT") * * @param featureTypes the type names reported by the datastore * @param table the fully qualified name of the table expected to be present */ private void testTypeExists(String[] featureTypes, String table) { for (int i = 0; i < featureTypes.length; i++) { if (featureTypes[i].equalsIgnoreCase(table.toUpperCase())) { LOGGER.fine("testTypeExists OK: " + table); return; } } fail("table " + table + " not found in getFeatureTypes results"); } }
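// A small sketch of the SimpleFeatureTypeBuilder calls the schema-creation tests above rely on; the
// type and attribute names are made up, and imports are omitted as in the surrounding snippets. The
// geometry attribute is deliberately not the last one, which is the case testCreateSchema()
// exercises.
class ExampleFeatureTypeSketch {
    static SimpleFeatureType buildExampleType() {
        SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
        builder.setName("EXAMPLE.GT_TEST_EXAMPLE");
        builder.add("NAME", String.class);
        builder.add("GEOM", Point.class); // geometry in the middle of the attribute list
        builder.add("COUNT", Integer.class);
        return builder.buildFeatureType();
    }
}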
/** * Makes sure file paths start with file:// (otherwise paths like /home/user/file.shp won't be * recognized as valid). Also, if a path is inside the data directory it is turned into a relative * path * * @author Andrea Aime - GeoSolutions */ public class FileModel implements IModel<String> { private static final long serialVersionUID = 3911203737278340528L; static final Logger LOGGER = Logging.getLogger(FileModel.class); IModel<String> delegate; File rootDir; public FileModel(IModel<String> delegate) { this(delegate, GeoServerExtensions.bean(GeoServerResourceLoader.class).getBaseDirectory()); } public FileModel(IModel<String> delegate, File rootDir) { this.delegate = delegate; this.rootDir = rootDir; } private boolean isSubfile(File root, File selection) { if (selection == null || "".equals(selection.getPath())) return false; if (selection.equals(root)) return true; return isSubfile(root, selection.getParentFile()); } public String getObject() { return delegate.getObject(); } public void detach() { // nothing to detach, this model holds no transient resources } public void setObject(String location) { if (location != null) { File dataDirectory = canonicalize(rootDir); File file = canonicalize(new File(location)); if (isSubfile(dataDirectory, file)) { File curr = file; String path = null; // paranoid check to avoid infinite loops while (curr != null && !curr.equals(dataDirectory)) { if (path == null) { path = curr.getName(); } else { path = curr.getName() + "/" + path; } curr = curr.getParentFile(); } location = "file:" + path; } else { File dataFile = Files.url(rootDir, location); if (dataFile != null && !dataFile.equals(file)) { // relative to the data directory, does not need fixing } else { location = "file://" + file.getAbsolutePath(); } } } delegate.setObject(location); } /** * Turns a file into canonical form if possible * * @param file the file to canonicalize */ File canonicalize(File file) { try { return file.getCanonicalFile(); } catch (IOException e) { LOGGER.log(Level.INFO, "Could not convert " + file + " into canonical form", e); return file; } } }
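// Hedged usage sketch of the model above: Model is Wicket's plain IModel implementation, the
// directories and paths are examples only, and the values in the comments are the expected outcome
// of setObject(), not output captured from a running GeoServer (imports omitted, as in the
// surrounding snippets).
class FileModelUsageSketch {
    static void example() {
        FileModel model = new FileModel(new Model<String>(), new File("/var/geoserver/data"));
        model.setObject("/var/geoserver/data/shapefiles/roads.shp"); // expected to store "file:shapefiles/roads.shp"
        model.setObject("/tmp/external.shp"); // expected to store "file:///tmp/external.shp"
    }
}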
/** * Creates a MultiPoint, MultiLineString, or MultiPolygon geometry as required by the internal * functions. * * @author Ian Turton, CCG * @author Rob Hranac, Vision for New York * @source $URL$ * @version $Id$ */ public class SubHandlerMulti extends SubHandler { /** The logger for the GML module. */ private static final Logger LOGGER = org.geotools.util.logging.Logging.getLogger("org.geotools.gml"); /** Remembers the list of all possible sub (base) types for this multi type. */ private static final Collection BASE_GEOMETRY_TYPES = new Vector(java.util.Arrays.asList(new String[] {"Point", "LineString", "Polygon"})); /** Geometry factory to return the multi type. */ private GeometryFactory geometryFactory = new GeometryFactory(); /** Handler factory to return the sub type. */ private SubHandlerFactory handlerFactory = new SubHandlerFactory(); /** Creates a SubHandler for the current sub type. */ private SubHandler currentHandler; /** Stores list of all sub types. */ private List geometries = new Vector(); /** Remembers the current sub type (ie. Line, Polygon, Point). */ private String internalType; /** Remembers whether or not the internal type is set already. */ private boolean internalTypeSet = false; /** Empty constructor. */ public SubHandlerMulti() {} /** * Handles all internal (sub) geometries. * * @param message The sub geometry type found. * @param type Whether or not it is at a start or end. */ public void subGeometry(String message, int type) { LOGGER.fine("subGeometry message = " + message + " type = " + type); // if the internal type is not yet set, set it if (!internalTypeSet) { if (BASE_GEOMETRY_TYPES.contains(message)) { internalType = message; internalTypeSet = true; LOGGER.fine("Internal type set to " + message); } } // if the internal type is already set, then either: // create a new handler, if at start of geometry, or // return the completed geometry, if at the end of it if (message.equals(internalType)) { if (type == GEOMETRY_START) { currentHandler = handlerFactory.create(internalType); } else if (type == GEOMETRY_END) { geometries.add(currentHandler.create(geometryFactory)); } else if (type == GEOMETRY_SUB) { currentHandler.subGeometry(message, type); } } else { currentHandler.subGeometry(message, type); LOGGER.fine(internalType + " != " + message); } } /** * Adds a coordinate to the current internal (sub) geometry. * * @param coordinate The coordinate. */ public void addCoordinate(Coordinate coordinate) { currentHandler.addCoordinate(coordinate); } /** * Determines whether or not it is time to return this geometry. * * @param message The geometry element that prompted this check. * @return DOCUMENT ME! */ public boolean isComplete(String message) { if (message.equals("Multi" + internalType)) { return true; } else { return false; } } /** * Returns a completed multi type. * * @param geometryFactory The factory this method should use to create the multi type. * @return Appropriate multi geometry type. 
*/ public Geometry create(GeometryFactory geometryFactory) { if (internalType.equals("Point")) { Point[] pointArray = geometryFactory.toPointArray(geometries); MultiPoint multiPoint = geometryFactory.createMultiPoint(pointArray); multiPoint.setUserData(getSRS()); multiPoint.setSRID(getSRID()); LOGGER.fine("created " + multiPoint); return multiPoint; } else if (internalType.equals("LineString")) { LineString[] lineStringArray = geometryFactory.toLineStringArray(geometries); MultiLineString multiLineString = geometryFactory.createMultiLineString(lineStringArray); multiLineString.setUserData(getSRS()); multiLineString.setSRID(getSRID()); LOGGER.fine("created " + multiLineString); return multiLineString; } else if (internalType.equals("Polygon")) { Polygon[] polygonArray = geometryFactory.toPolygonArray(geometries); MultiPolygon multiPolygon = geometryFactory.createMultiPolygon(polygonArray); multiPolygon.setUserData(getSRS()); multiPolygon.setSRID(getSRID()); LOGGER.fine("created " + multiPolygon); return multiPolygon; } else { return null; } } }
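// Minimal JTS sketch of what create(...) returns for the "Point" case above: the individual points
// collected while parsing are wrapped into a single MultiPoint. Coordinates are arbitrary and
// imports are omitted, as in the surrounding snippets.
class MultiPointSketch {
    static MultiPoint example() {
        GeometryFactory gf = new GeometryFactory();
        Point p1 = gf.createPoint(new Coordinate(1, 1));
        Point p2 = gf.createPoint(new Coordinate(2, 3));
        return gf.createMultiPoint(new Point[] {p1, p2});
    }
}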
/** * Oracle specific filter encoder. * * @author Justin Deoliveira, OpenGEO * @author Andrea Aime, OpenGEO * @source $URL$ */ public class OracleFilterToSQL extends PreparedFilterToSQL { /** Logger - for logging */ private static final Logger LOGGER = org.geotools.util.logging.Logging.getLogger("org.geotools.filter.SQLEncoderOracle"); /** Contains filter type to SDO_RELATE mask type mappings */ private static final Map<Class, String> SDO_RELATE_MASK_MAP = new HashMap<Class, String>() { { put(Contains.class, "contains"); put(Crosses.class, "overlapbdydisjoint"); put(Equals.class, "equal"); put(Overlaps.class, "overlapbdyintersect"); put(Touches.class, "touch"); put(Within.class, "inside"); put(Disjoint.class, "disjoint"); put(BBOX.class, "anyinteract"); put(Intersects.class, "anyinteract"); } }; /** The whole world in WGS84 */ private static final Envelope WORLD = new Envelope(-179.99, 179.99, -89.99, 89.99); /** * If we have to turn <code>a op b</code> into <code>b op2 a</code>, what's the op2 that returns * the same result? */ private static final Map<String, String> INVERSE_OPERATOR_MAP = new HashMap<String, String>() { { // asymmetric operators, op2 = !op put("contains", "inside"); put("inside", "contains"); // symmetric operators, op2 = op put("overlapbdydisjoint", "overlapbdydisjoint"); put("overlapbdyintersect", "overlapbdyintersect"); put("touch", "touch"); put("equal", "equal"); put("anyinteract", "anyinteract"); put("disjoint", "disjoint"); } }; /** Whether BBOX should be encoded as just a primary filter or primary+secondary */ protected boolean looseBBOXEnabled; public OracleFilterToSQL(PreparedStatementSQLDialect dialect) { super(dialect); setSqlNameEscape("\""); } public boolean isLooseBBOXEnabled() { return looseBBOXEnabled; } public void setLooseBBOXEnabled(boolean looseBBOXEnabled) { this.looseBBOXEnabled = looseBBOXEnabled; } @Override protected FilterCapabilities createFilterCapabilities() { FilterCapabilities caps = new FilterCapabilities(); caps.addAll(SQLDialect.BASE_DBMS_CAPABILITIES); // adding the spatial filters support caps.addType(BBOX.class); caps.addType(Contains.class); caps.addType(Crosses.class); caps.addType(Disjoint.class); caps.addType(Equals.class); caps.addType(Intersects.class); caps.addType(Overlaps.class); caps.addType(Touches.class); caps.addType(Within.class); caps.addType(DWithin.class); caps.addType(Beyond.class); // temporal filters caps.addType(After.class); caps.addType(Before.class); caps.addType(Begins.class); caps.addType(BegunBy.class); caps.addType(During.class); caps.addType(TOverlaps.class); caps.addType(Ends.class); caps.addType(EndedBy.class); caps.addType(TEquals.class); return caps; } @Override protected Object visitBinarySpatialOperator( BinarySpatialOperator filter, PropertyName property, Literal geometry, boolean swapped, Object extraData) { return visitBinarySpatialOperator( filter, (Expression) property, (Expression) geometry, swapped, extraData); } @Override protected Object visitBinarySpatialOperator( BinarySpatialOperator filter, Expression e1, Expression e2, Object extraData) { return visitBinarySpatialOperator(filter, e1, e2, false, extraData); } protected Object visitBinarySpatialOperator( BinarySpatialOperator filter, Expression e1, Expression e2, boolean swapped, Object extraData) { try { e1 = clipToWorld(filter, e1); e2 = clipToWorld(filter, e2); if (filter instanceof Beyond || filter instanceof DWithin) doSDODistance(filter, e1, e2, extraData); else if (filter instanceof BBOX && looseBBOXEnabled) { doSDOFilter(filter, 
e1, e2, extraData); } else doSDORelate(filter, e1, e2, swapped, extraData); } catch (IOException ioe) { throw new RuntimeException(IO_ERROR, ioe); } return extraData; } Expression clipToWorld(BinarySpatialOperator filter, Expression e) { if (e instanceof Literal) { Geometry eval = e.evaluate(filter, Geometry.class); // Oracle cannot deal with filters using geometries that span beyond the whole world // in case the if (dialect != null && isCurrentGeometryGeodetic() && !WORLD.contains(eval.getEnvelopeInternal())) { Geometry result = eval.intersection(JTS.toGeometry(WORLD)); if (result != null && !result.isEmpty()) { if (result instanceof GeometryCollection) { result = distillSameTypeGeometries((GeometryCollection) result, eval); } e = new FilterFactoryImpl().createLiteralExpression(result); } } } return e; } /** * Returns true if the current geometry has the geodetic marker raised * * @return */ boolean isCurrentGeometryGeodetic() { if (currentGeometry != null) { Boolean geodetic = (Boolean) currentGeometry.getUserData().get(OracleDialect.GEODETIC); return geodetic != null && geodetic; } return false; } protected Geometry distillSameTypeGeometries(GeometryCollection coll, Geometry original) { if (original instanceof Polygon || original instanceof MultiPolygon) { List<Polygon> polys = new ArrayList<Polygon>(); accumulateGeometries(polys, coll, Polygon.class); return original .getFactory() .createMultiPolygon(((Polygon[]) polys.toArray(new Polygon[polys.size()]))); } else if (original instanceof LineString || original instanceof MultiLineString) { List<LineString> ls = new ArrayList<LineString>(); accumulateGeometries(ls, coll, LineString.class); return original .getFactory() .createMultiLineString((LineString[]) ls.toArray(new LineString[ls.size()])); } else if (original instanceof Point || original instanceof MultiPoint) { List<LineString> points = new ArrayList<LineString>(); accumulateGeometries(points, coll, LineString.class); return original .getFactory() .createMultiPoint((Point[]) points.toArray(new Point[points.size()])); } else { return original; } } protected <T> void accumulateGeometries( List<T> collection, Geometry g, Class<? 
extends T> target) { if (target.isInstance(g)) { collection.add((T) g); } else if (g instanceof GeometryCollection) { GeometryCollection coll = (GeometryCollection) g; for (int i = 0; i < coll.getNumGeometries(); i++) { accumulateGeometries(collection, coll.getGeometryN(i), target); } } } protected void doSDOFilter(Filter filter, Expression e1, Expression e2, Object extraData) throws IOException { out.write("SDO_FILTER("); e1.accept(this, extraData); out.write(", "); e2.accept(this, extraData); // for backwards compatibility with Oracle 9 we add the mask and querytypes params out.write(", 'mask=anyinteract querytype=WINDOW') = 'TRUE' "); } /** * Encodes an SDO relate * * @param filter * @param property * @param geometry * @param extraData */ protected void doSDORelate( Filter filter, Expression e1, Expression e2, boolean swapped, Object extraData) throws IOException { // grab the operating mask String mask = null; for (Class filterClass : SDO_RELATE_MASK_MAP.keySet()) { if (filterClass.isAssignableFrom(filter.getClass())) mask = SDO_RELATE_MASK_MAP.get(filterClass); } if (mask == null) throw new IllegalArgumentException( "Cannot encode filter " + filter.getClass() + " into a SDO_RELATE"); if (swapped) mask = INVERSE_OPERATOR_MAP.get(mask); // ok, ready to write out the SDO_RELATE out.write("SDO_RELATE("); e1.accept(this, extraData); out.write(", "); e2.accept(this, extraData); // for disjoint we ask for no interaction, anyinteract == false if (filter instanceof Disjoint) { out.write(", 'mask=ANYINTERACT querytype=WINDOW') <> 'TRUE' "); } else { out.write(", 'mask=" + mask + " querytype=WINDOW') = 'TRUE' "); } } protected void doSDODistance( BinarySpatialOperator filter, Expression e1, Expression e2, Object extraData) throws IOException { double distance = ((DistanceBufferOperator) filter).getDistance(); String unit = ((DistanceBufferOperator) filter).getDistanceUnits(); String within = filter instanceof DWithin ? "TRUE" : "FALSE"; out.write("SDO_WITHIN_DISTANCE("); e1.accept(this, extraData); out.write(","); e2.accept(this, extraData); // encode the unit verbatim when available if (unit != null && !"".equals(unit.trim())) out.write(",'distance=" + distance + " unit=" + unit + "') = '" + within + "' "); else out.write(",'distance=" + distance + "') = '" + within + "' "); } }
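// Hedged illustration (not part of the class above) of the SDO_RELATE predicate shape that
// doSDORelate(...) writes for the non-disjoint case; the column name and the bind placeholder in
// the trailing comment are invented for the example.
class SdoRelateExample {
    static String sdoRelate(String geometryColumn, String geometryPlaceholder, String mask) {
        return "SDO_RELATE(" + geometryColumn + ", " + geometryPlaceholder
                + ", 'mask=" + mask + " querytype=WINDOW') = 'TRUE' ";
    }
    // e.g. sdoRelate("GEOM", "?", "anyinteract")
    //      -> SDO_RELATE(GEOM, ?, 'mask=anyinteract querytype=WINDOW') = 'TRUE'
}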
/** * @author Russell Petty (GeoScience Victoria) * @source $URL$ * http://svn.osgeo.org/geotools/trunk/modules/unsupported/app-schema/app-schema/src/main * /java/org/geotools/data/complex/MappingFeatureIteratorFactory.java $ */ public class MappingFeatureIteratorFactory { protected static final Logger LOGGER = org.geotools.util.logging.Logging.getLogger("org.geotools.data.complex"); protected static class CheckIfNestedFilterVisitor extends DefaultFilterVisitor { public boolean hasNestedAttributes = false; public Object visit(PropertyName expression, Object data) { if (expression instanceof NestedAttributeExpression) { hasNestedAttributes = true; } return data; } } public static IMappingFeatureIterator getInstance( AppSchemaDataAccess store, FeatureTypeMapping mapping, Query query, Filter unrolledFilter) throws IOException { if (mapping instanceof XmlFeatureTypeMapping) { return new XmlMappingFeatureIterator(store, mapping, query); } if (AppSchemaDataAccessConfigurator.isJoining()) { if (!(query instanceof JoiningQuery)) { query = new JoiningQuery(query); } FeatureSource mappedSource = mapping.getSource(); FilterCapabilities capabilities = getFilterCapabilities(mappedSource); IMappingFeatureIterator iterator; if (unrolledFilter != null) { query.setFilter(Filter.INCLUDE); Query unrolledQuery = store.unrollQuery(query, mapping); unrolledQuery.setFilter(unrolledFilter); if (isSimpleType(mapping)) { iterator = new MappingAttributeIterator(store, mapping, query, false, unrolledQuery); } else { iterator = new DataAccessMappingFeatureIterator(store, mapping, query, false, unrolledQuery); } } else { Filter filter = query.getFilter(); ComplexFilterSplitter splitter = new ComplexFilterSplitter(capabilities, mapping); filter.accept(splitter, null); query.setFilter(splitter.getFilterPre()); filter = splitter.getFilterPost(); int maxFeatures = Query.DEFAULT_MAX; if (filter != null && filter != Filter.INCLUDE) { maxFeatures = query.getMaxFeatures(); query.setMaxFeatures(Query.DEFAULT_MAX); } iterator = new DataAccessMappingFeatureIterator(store, mapping, query, false); if (filter != null && filter != Filter.INCLUDE) { iterator = new PostFilteringMappingFeatureIterator(iterator, filter, maxFeatures); } } return iterator; } else { if (query.getFilter() != null) { Query unrolledQuery = store.unrollQuery(query, mapping); Filter filter = unrolledQuery.getFilter(); CheckIfNestedFilterVisitor visitor = new CheckIfNestedFilterVisitor(); filter.accept(visitor, null); if (visitor.hasNestedAttributes) { FeatureSource mappedSource = mapping.getSource(); if (mappedSource instanceof JDBCFeatureSource || mappedSource instanceof JDBCFeatureStore) { FilterCapabilities capabilities = getFilterCapabilities(mappedSource); ComplexFilterSplitter splitter = new ComplexFilterSplitter(capabilities, mapping); filter.accept(splitter, null); query.setFilter(splitter.getFilterPre()); unrolledQuery.setFilter(splitter.getFilterPre()); filter = splitter.getFilterPost(); } else { // VT:no Filtering capbilities cause source may not be of jdbc type // therefore we continue; // has nested attribute in the filter expression unrolledQuery.setFilter(Filter.INCLUDE); } return new FilteringMappingFeatureIterator(store, mapping, query, unrolledQuery, filter); } else if (!filter.equals(Filter.INCLUDE) && !filter.equals(Filter.EXCLUDE) && !(filter instanceof FidFilterImpl)) { // normal filters if (isSimpleType(mapping)) { return new MappingAttributeIterator(store, mapping, query, true, unrolledQuery); } else { return new 
DataAccessMappingFeatureIterator(store, mapping, query, true, unrolledQuery); } } } return new DataAccessMappingFeatureIterator(store, mapping, query, false); } } private static boolean isSimpleType(FeatureTypeMapping mapping) { return Types.isSimpleContentType(mapping.getTargetFeature().getType()); } private static FilterCapabilities getFilterCapabilities(FeatureSource mappedSource) throws IllegalArgumentException { FilterCapabilities capabilities = null; if (mappedSource instanceof JDBCFeatureSource) { capabilities = ((JDBCFeatureSource) mappedSource).getDataStore().getFilterCapabilities(); } else if (mappedSource instanceof JDBCFeatureStore) { capabilities = ((JDBCFeatureStore) mappedSource).getDataStore().getFilterCapabilities(); } else { throw new IllegalArgumentException("Joining queries are only supported on JDBC data stores"); } return capabilities; } }
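// Sketch of the pre/post filter split performed above, using only calls that appear in the factory:
// the part of the filter the backend can evaluate (pre) is pushed into the source query, while the
// remainder (post) is applied in memory by PostFilteringMappingFeatureIterator. Imports are omitted,
// as in the surrounding snippets.
class FilterSplitSketch {
    static Filter[] split(FilterCapabilities capabilities, FeatureTypeMapping mapping, Filter filter) {
        ComplexFilterSplitter splitter = new ComplexFilterSplitter(capabilities, mapping);
        filter.accept(splitter, null);
        Filter pre = splitter.getFilterPre();   // evaluated by the underlying (e.g. JDBC) source
        Filter post = splitter.getFilterPost(); // applied in memory after mapping
        return new Filter[] {pre, post};
    }
}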
/** * Backwards (<= 2.1.3) compatible {@link GeoServerTileLayerInfoImpl} loader. * * @author groldan */ public class LegacyTileLayerInfoLoader { private static final Logger LOGGER = Logging.getLogger(LegacyTileLayerInfoLoader.class); public static final String CONFIG_KEY_ENABLED = "GWC.enabled"; public static final String CONFIG_KEY_GUTTER = "GWC.gutter"; public static final String CONFIG_KEY_GRIDSETS = "GWC.gridSets"; public static final String CONFIG_KEY_METATILING_X = "GWC.metaTilingX"; public static final String CONFIG_KEY_METATILING_Y = "GWC.metaTilingY"; public static final String CONFIG_KEY_FORMATS = "GWC.cacheFormats"; public static final String CONFIG_KEY_AUTO_CACHE_STYLES = "GWC.autoCacheStyles"; public static final String CONFIG_KEY_CACHED_STYLES = "GWC.cachedNonDefaultStyles"; public static final String CONFIG_KEY_IN_MEMORY_CACHED = "GWC.inMemoryUncached"; public static final String[] _ALL_KEYS = { CONFIG_KEY_ENABLED, CONFIG_KEY_GUTTER, CONFIG_KEY_GRIDSETS, CONFIG_KEY_METATILING_X, CONFIG_KEY_METATILING_Y, CONFIG_KEY_FORMATS, CONFIG_KEY_AUTO_CACHE_STYLES, CONFIG_KEY_CACHED_STYLES, CONFIG_KEY_IN_MEMORY_CACHED }; public static GeoServerTileLayerInfoImpl load(final LayerInfo layer) { MetadataMap metadataMap = layer.getMetadata(); if (!hasTileLayerDef(metadataMap)) { return null; } GeoServerTileLayerInfoImpl tileLayerInfo = load(metadataMap); if (metadataMap.containsKey(CONFIG_KEY_CACHED_STYLES)) { final String defaultStyle = layer.getDefaultStyle() == null ? "" : layer.getDefaultStyle().prefixedName(); String cachedStylesStr = metadataMap.get(CONFIG_KEY_CACHED_STYLES, String.class); Set<String> cachedStyles = unmarshalSet(cachedStylesStr); TileLayerInfoUtil.setCachedStyles(tileLayerInfo, defaultStyle, cachedStyles); } TileLayerInfoUtil.checkAutomaticStyles(layer, tileLayerInfo); tileLayerInfo.setName(tileLayerName(layer)); tileLayerInfo.setId(layer.getId()); return tileLayerInfo; } public static boolean hasTileLayerDef(MetadataMap metadataMap) { return metadataMap.containsKey(CONFIG_KEY_ENABLED); } public static GeoServerTileLayerInfoImpl load(final LayerGroupInfo layerGroup) { MetadataMap metadataMap = layerGroup.getMetadata(); if (!hasTileLayerDef(metadataMap)) { return null; } GeoServerTileLayerInfoImpl tileLayerInfo = load(metadataMap); if (tileLayerInfo != null) { tileLayerInfo.setName(tileLayerName(layerGroup)); tileLayerInfo.setId(layerGroup.getId()); } TileLayerInfoUtil.checkAutomaticStyles(layerGroup, tileLayerInfo); return tileLayerInfo; } private static GeoServerTileLayerInfoImpl load(final MetadataMap metadataMap) { GeoServerTileLayerInfoImpl info = new GeoServerTileLayerInfoImpl(); // whether the config needs to be saved final boolean enabled = metadataMap.get(CONFIG_KEY_ENABLED, Boolean.class).booleanValue(); info.setEnabled(enabled); int gutter = metadataMap.get(CONFIG_KEY_GUTTER, Integer.class).intValue(); info.setGutter(gutter); String gridsets = metadataMap.get(CONFIG_KEY_GRIDSETS, String.class); Set<XMLGridSubset> gridSetIds = unmarshalGridSubsets(gridsets); info.getGridSubsets().addAll(gridSetIds); int metaTilingX = metadataMap.get(CONFIG_KEY_METATILING_X, Integer.class).intValue(); info.setMetaTilingX(metaTilingX); int metaTilingY = metadataMap.get(CONFIG_KEY_METATILING_Y, Integer.class).intValue(); info.setMetaTilingY(metaTilingY); if (metadataMap.containsKey(CONFIG_KEY_FORMATS)) { String mimeFormatsStr = metadataMap.get(CONFIG_KEY_FORMATS, String.class); Set<String> mimeFormats = unmarshalSet(mimeFormatsStr); info.getMimeFormats().addAll(mimeFormats); } if 
(metadataMap.containsKey(CONFIG_KEY_AUTO_CACHE_STYLES)) { boolean autoCacheStyles = metadataMap.get(CONFIG_KEY_AUTO_CACHE_STYLES, Boolean.class).booleanValue(); info.setAutoCacheStyles(autoCacheStyles); } if (metadataMap.containsKey(CONFIG_KEY_IN_MEMORY_CACHED)) { boolean inMemoryCached = metadataMap.get(CONFIG_KEY_IN_MEMORY_CACHED, Boolean.class); info.setInMemoryCached(inMemoryCached); } return info; } private static Set<String> unmarshalSet(final String listStr) { Set<String> unmarshalled = new HashSet<String>(Arrays.asList(listStr.split(","))); return unmarshalled; } private static String marshalList(final Collection<String> list) { StringBuilder sb = new StringBuilder(); for (Iterator<String> i = list.iterator(); i.hasNext(); ) { sb.append(i.next()); if (i.hasNext()) { sb.append(","); } } return sb.toString(); } /** * @param gridSubsetsStr comma separated list of epsg codes (usually just {@code * EPSG:900913,EPSG:4326} * @return the list of parsed grid subsets from the argument JSON array * @throws IllegalArgumentException if {@code str} can't be parsed to a JSONArray */ private static Set<XMLGridSubset> unmarshalGridSubsets(String gridSubsetsStr) throws IllegalArgumentException { Set<XMLGridSubset> gridSubsets = new HashSet<XMLGridSubset>(); // backwards compatibility check for when str comes in as "EPSG:XXX,EPSG:YYY" String[] epsgCodes = gridSubsetsStr.split(","); for (String code : epsgCodes) { if (code.trim().length() == 0) { continue; } try { XMLGridSubset xmlGridSubset = new XMLGridSubset(); xmlGridSubset.setGridSetName(code); gridSubsets.add(xmlGridSubset); } catch (Exception e) { LOGGER.log(Level.WARNING, "Invalid GridSubset list: " + gridSubsetsStr); } } gridSubsets.remove(null); return gridSubsets; } public static void clear(MetadataMap metadata) { if (metadata != null) { for (String key : LegacyTileLayerInfoLoader._ALL_KEYS) { metadata.remove(key); } } } /** * Saves a tile layer info into the given metadata map using the old legacy metadata elements. For * unit testing only. * * @param source * @param metadata */ public static void save(GeoServerTileLayerInfo source, MetadataMap metadata) { final boolean enabled = source.isEnabled(); final int gutter = source.getGutter(); final Set<XMLGridSubset> cachedGridSubsets = source.getGridSubsets(); final int metaTilingX = source.getMetaTilingX(); final int metaTilingY = source.getMetaTilingY(); final Set<String> mimeFormats = source.getMimeFormats(); final Boolean autoCacheStyles = source.isAutoCacheStyles(); final Set<String> cachedStyles = source.cachedStyles(); final boolean inMemoryCached = source.isInMemoryCached(); metadata.put(CONFIG_KEY_ENABLED, Boolean.valueOf(enabled)); metadata.put(CONFIG_KEY_GUTTER, Integer.valueOf(gutter)); Collection<String> subsetNames = new ArrayList<String>(); for (XMLGridSubset s : cachedGridSubsets) { subsetNames.add(s.getGridSetName()); } metadata.put(CONFIG_KEY_GRIDSETS, marshalList(subsetNames)); metadata.put(CONFIG_KEY_METATILING_X, Integer.valueOf(metaTilingX)); metadata.put(CONFIG_KEY_METATILING_Y, Integer.valueOf(metaTilingY)); metadata.put(CONFIG_KEY_FORMATS, marshalList(mimeFormats)); metadata.put(CONFIG_KEY_AUTO_CACHE_STYLES, autoCacheStyles); metadata.put(CONFIG_KEY_IN_MEMORY_CACHED, inMemoryCached); if (cachedStyles.size() > 0) { metadata.put(CONFIG_KEY_CACHED_STYLES, marshalList(cachedStyles)); } else { metadata.remove(CONFIG_KEY_CACHED_STYLES); } } }
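// Hedged sketch of the legacy metadata layout the loader above understands; MetadataMap's
// no-argument constructor is assumed, the values are examples only, and imports are omitted as in
// the surrounding snippets.
class LegacyMetadataSketch {
    static boolean example() {
        MetadataMap metadata = new MetadataMap(); // no-arg constructor assumed
        metadata.put(LegacyTileLayerInfoLoader.CONFIG_KEY_ENABLED, Boolean.TRUE);
        metadata.put(LegacyTileLayerInfoLoader.CONFIG_KEY_GUTTER, Integer.valueOf(0));
        metadata.put(LegacyTileLayerInfoLoader.CONFIG_KEY_GRIDSETS, "EPSG:900913,EPSG:4326");
        metadata.put(LegacyTileLayerInfoLoader.CONFIG_KEY_METATILING_X, Integer.valueOf(4));
        metadata.put(LegacyTileLayerInfoLoader.CONFIG_KEY_METATILING_Y, Integer.valueOf(4));
        return LegacyTileLayerInfoLoader.hasTileLayerDef(metadata); // true for this map
    }
}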
public abstract class SolrTestSupport extends OnlineTestCase { protected static final Logger LOGGER = org.geotools.util.logging.Logging.getLogger(SolrTestSupport.class); static { // uncomment to turn up logging java.util.logging.ConsoleHandler handler = new java.util.logging.ConsoleHandler(); handler.setLevel(java.util.logging.Level.FINE); org.geotools.util.logging.Logging.getLogger("org.geotools.data.solr") .setLevel(java.util.logging.Level.FINE); org.geotools.util.logging.Logging.getLogger("org.geotools.data.solr").addHandler(handler); } protected SolrFeatureSource featureSource; protected SolrDataStore dataStore; protected String testFile = "wifiAccessPoint.xml"; protected String layerName = "active"; protected int SOURCE_SRID = 4326; protected String pkField; private ArrayList<SolrAttribute> attributes; private static boolean setUpIsDone = false; protected DateFormat df = new SimpleDateFormat("yyyy-dd-MM HH:mm:ss"); public void setUpSolrFile(String url) throws Exception { if (setUpIsDone) { return; } // do the setup File testDir = (Paths.get(getClass().getResource("/" + testFile).toURI()).getParent()).toFile(); ProcessBuilder pb = new ProcessBuilder("java", "-Durl=" + url + "/update", "-jar", "post.jar", testFile); pb.directory(testDir); LOGGER.log(Level.FINE, "Starting SOLR import"); final Process command = pb.start(); LOGGER.log(Level.FINE, "Started SOLR import"); String line; BufferedReader bri = new BufferedReader(new InputStreamReader(command.getInputStream())); BufferedReader bre = new BufferedReader(new InputStreamReader(command.getErrorStream())); while ((line = bri.readLine()) != null) { LOGGER.log(Level.FINE, line); } bri.close(); while ((line = bre.readLine()) != null) { LOGGER.log(Level.FINE, line); } bre.close(); int i = command.waitFor(); assertTrue(i == 0); LOGGER.log(Level.FINE, "SOLR import DONE!"); setUpIsDone = true; } @Override protected void connect() throws Exception { String url = fixture.getProperty(SolrDataStoreFactory.URL.key); setUpSolrFile(url); Map params = createConnectionParams(url, fixture); SolrDataStoreFactory factory = new SolrDataStoreFactory(); dataStore = (SolrDataStore) factory.createDataStore(params); attributes = dataStore.getSolrAttributes(this.layerName); for (SolrAttribute at : attributes) { if (at.isPk()) { this.pkField = at.getName(); } if (Geometry.class.isAssignableFrom(at.getType())) { at.setSrid(SOURCE_SRID); } at.setUse(true); } } protected Map createConnectionParams(String url, Properties fixture) { String field = fixture.getProperty(SolrDataStoreFactory.FIELD.key); Map params = new HashMap(); params.put(SolrDataStoreFactory.URL.key, url); params.put(SolrDataStoreFactory.FIELD.key, field); params.put(SolrDataStoreFactory.NAMESPACE.key, SolrDataStoreFactory.NAMESPACE.sample); return params; } protected void init() throws Exception { df.setTimeZone(TimeZone.getTimeZone("UTC")); init(this.layerName); } protected void init(String layerName) throws Exception { init(layerName, "geo"); } protected void init(String layerName, String geometryField) throws Exception { this.layerName = layerName; SolrLayerConfiguration solrLayerConfiguration = new SolrLayerConfiguration(new ArrayList<SolrAttribute>()); solrLayerConfiguration.setLayerName(this.layerName); List<SolrAttribute> layerAttributes = new ArrayList<>(); for (SolrAttribute solrAttribute : attributes) { if (geometryField.equals(solrAttribute.getName())) { SolrAttribute copy = new SolrAttribute(solrAttribute); copy.setDefaultGeometry(true); layerAttributes.add(copy); } else { 
layerAttributes.add(solrAttribute); } } solrLayerConfiguration.getAttributes().addAll(layerAttributes); dataStore.setSolrConfigurations(solrLayerConfiguration); featureSource = (SolrFeatureSource) dataStore.getFeatureSource(this.layerName); } @Override protected void disconnect() throws Exception { dataStore.dispose(); } @Override protected String getFixtureId() { return SolrDataStoreFactory.NAMESPACE.sample.toString(); } protected Date date(String date) throws ParseException { return df.parse(date); } protected Instant instant(String d) throws ParseException { return new DefaultInstant(new DefaultPosition(date(d))); } protected Period period(String d1, String d2) throws ParseException { return new DefaultPeriod(instant(d1), instant(d2)); } }
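// Hedged sketch of how a concrete online test might build on the support class above; the
// "stations" layer and "location" geometry field are made-up names, and the assertion relies on the
// JUnit 3 style assertions inherited through OnlineTestCase.
public class SolrStationsOnlineTest extends SolrTestSupport {
    public void testFeatureSourceTypeName() throws Exception {
        init("stations", "location"); // pick the layer and its geometry field
        assertEquals("stations", featureSource.getSchema().getTypeName());
    }
}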