@Test
public void testDelete() throws CQLException {
    assertEquals(1, store.remove(CQL.toFilter("processName = 'test1'")));
    checkContains(store.list(Query.ALL), s2, s3, s4);
    assertEquals(2, store.remove(CQL.toFilter("progress > 30")));
    checkContains(store.list(Query.ALL), s2);
    assertEquals(1, store.remove(CQL.toFilter("phase = 'FAILED'")));
    checkContains(store.list(Query.ALL));
}
public static Filter parseFilter(String cql) {
    try {
        return CQL.toFilter(cql);
    } catch (CQLException ex) {
        throw new IllegalArgumentException(ex);
    }
}
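// Hypothetical usage sketch for the parseFilter helper above (not from the original
// source; the attribute name "persons" is illustrative only). The helper wraps the
// checked CQLException in an unchecked IllegalArgumentException, so callers can build
// filters inline without a try/catch around every parse.
Filter valid = parseFilter("persons > 1000000");
try {
    parseFilter("persons >>> 10"); // malformed CQL
} catch (IllegalArgumentException expected) {
    // invalid CQL surfaces as an unchecked exception with the CQLException as its cause
}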
public void testToFilter() throws Exception {
    String filter = "IN('foo','bar')";
    try {
        CQL.toFilter(filter);
        fail("filter should have thrown exception");
    } catch (CQLException e) {
    }

    Filter f1 = ECQL.toFilter(filter);
    Filter f2 = XCQL.toFilter(filter);
    assertEquals(f1, f2);
}
/**
 * Parses a CQL/ECQL filter string.
 *
 * <p>This method first attempts to parse as ECQL, and on an error falls back to CQL.
 *
 * @param filter The ECQL/CQL string.
 * @see ECQL#toFilter(String)
 */
public static Filter toFilter(String filter) throws CQLException {
    try {
        return ECQL.toFilter(filter);
    } catch (CQLException e) {
        // failed to parse as ECQL, attempt to fall back on CQL
        try {
            return CQL.toFilter(filter);
        } catch (CQLException e1) {
            // fall through and throw back the original ECQL exception
        }
        throw e;
    }
}
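// A minimal sketch of how the ECQL-to-CQL fallback above behaves, assuming (as the
// surrounding tests suggest) that the method lives on the XCQL utility class. The two
// sample expressions come from the tests in this section: "IN('foo','bar')" only parses
// as ECQL, while "id = 2" fails to parse as ECQL and is handled by the CQL fallback.
Filter ecqlOnly = XCQL.toFilter("IN('foo','bar')"); // parsed by ECQL.toFilter
Filter cqlOnly = XCQL.toFilter("id = 2");           // parsed by the CQL.toFilter fallback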
public void testToFilterFallback() throws Exception {
    String filter = "id = 2";
    try {
        ECQL.toFilter(filter);
        fail("filter should have thrown exception");
    } catch (CQLException e) {
    }

    Filter f1 = CQL.toFilter(filter);
    Filter f2 = XCQL.toFilter(filter);
    assertEquals(f1, f2);
}
private void testTransformWithQuery(boolean invert)
        throws IOException, URISyntaxException, CQLException, NoSuchAuthorityCodeException,
                FactoryException, Exception {
    // grab the style
    Style style =
            RendererBaseTest.loadStyle(
                    this, invert ? "attributeRename.sld" : "attributeRenameNoInvert.sld");

    // grab the data
    File property = new File(TestData.getResource(this, "point.properties").toURI());
    PropertyDataStore ds = new PropertyDataStore(property.getParentFile());
    FeatureSource fs = ds.getFeatureSource("point");

    // prepare a feature layer with a query and the rendering tx
    FeatureLayer layer = new FeatureLayer(fs, style);
    layer.setQuery(new Query(null, CQL.toFilter("id > 5")));

    // render it
    MapContent mc = new MapContent();
    mc.addLayer(layer);
    StreamingRenderer renderer = new StreamingRenderer();
    final AtomicInteger counter = new AtomicInteger();
    renderer.addRenderListener(
            new RenderListener() {
                @Override
                public void featureRenderer(SimpleFeature feature) {
                    counter.incrementAndGet();
                }

                @Override
                public void errorOccurred(Exception e) {}
            });
    renderer.setMapContent(mc);
    ReferencedEnvelope re = new ReferencedEnvelope(0, 12, 0, 12, CRS.decode("EPSG:4326"));
    BufferedImage image =
            RendererBaseTest.showRender("Lines with circle stroke", renderer, TIME, re);

    // if everything went fine we'll have a single red dot in the middle, and we rendered
    // just one feature
    assertEquals(1, counter.get());
    assertEquals(Color.RED, getPixelColor(image, image.getWidth() / 2, image.getHeight() / 2));
}
@Test
public void testDispatchGetMapNonMatchingParameterFilter() throws Exception {
    GetMapRequest request = new GetMapRequest();
    @SuppressWarnings("unchecked")
    Map<String, String> rawKvp = new CaseInsensitiveMap(new HashMap<String, String>());
    request.setRawKvp(rawKvp);
    rawKvp.put("layers", tileLayer.getName());

    tileLayer.setEnabled(false);
    assertDispatchMismatch(request, "tile layer disabled");

    tileLayer.setEnabled(true);
    assertTrue(layer.enabled());

    request.setRemoteOwsURL(new URL("http://example.com"));
    assertDispatchMismatch(request, "remote OWS");
    request.setRemoteOwsURL(null);

    request.setRemoteOwsType("WFS");
    assertDispatchMismatch(request, "remote OWS");
    request.setRemoteOwsType(null);

    request.setEnv(ImmutableMap.of("envVar", "envValue"));
    assertDispatchMismatch(request, "no parameter filter exists for ENV");
    request.setEnv(null);

    request.setFormatOptions(ImmutableMap.of("optKey", "optVal"));
    assertDispatchMismatch(request, "no parameter filter exists for FORMAT_OPTIONS");
    request.setFormatOptions(null);

    request.setAngle(45);
    assertDispatchMismatch(request, "no parameter filter exists for ANGLE");
    request.setAngle(0);

    rawKvp.put("BGCOLOR", "0xAA0000");
    assertDispatchMismatch(request, "no parameter filter exists for BGCOLOR");
    rawKvp.remove("BGCOLOR");

    request.setBuffer(10);
    assertDispatchMismatch(request, "no parameter filter exists for BUFFER");
    request.setBuffer(0);

    request.setCQLFilter(Arrays.asList(CQL.toFilter("ATT = 1")));
    assertDispatchMismatch(request, "no parameter filter exists for CQL_FILTER");
    request.setCQLFilter(null);

    request.setElevation(10D);
    assertDispatchMismatch(request, "no parameter filter exists for ELEVATION");
    request.setElevation(Collections.emptyList());

    request.setFeatureId(Arrays.asList(new FeatureIdImpl("someid")));
    assertDispatchMismatch(request, "no parameter filter exists for FEATUREID");
    request.setFeatureId(null);

    request.setFilter(Arrays.asList(CQL.toFilter("ATT = 1")));
    assertDispatchMismatch(request, "no parameter filter exists for FILTER");
    request.setFilter(null);

    request.setPalette(PaletteManager.getPalette("SAFE"));
    assertDispatchMismatch(request, "no parameter filter exists for PALETTE");
    request.setPalette(null);

    request.setStartIndex(10);
    assertDispatchMismatch(request, "no parameter filter exists for STARTINDEX");
    request.setStartIndex(null);

    request.setMaxFeatures(1);
    assertDispatchMismatch(request, "no parameter filter exists for MAXFEATURES");
    request.setMaxFeatures(null);

    request.setTime(Arrays.asList((Object) 1, (Object) 2));
    assertDispatchMismatch(request, "no parameter filter exists for TIME");
    request.setTime(Collections.emptyList());

    List<Map<String, String>> viewParams =
            ImmutableList.of((Map<String, String>) ImmutableMap.of("paramKey", "paramVal"));
    request.setViewParams(viewParams);
    assertDispatchMismatch(request, "no parameter filter exists for VIEWPARAMS");
    request.setViewParams(null);

    request.setFeatureVersion("@version");
    assertDispatchMismatch(request, "no parameter filter exists for FEATUREVERSION");
    request.setFeatureVersion(null);
}
@Override
protected Object doExecute() throws Exception {
    String formatString =
            "%1$-33s %2$-26s %3$-" + TITLE_MAX_LENGTH + "s %4$-" + EXCERPT_MAX_LENGTH + "s%n";

    CatalogFacade catalogProvider = getCatalog();

    Filter filter = null;
    if (cqlFilter != null) {
        filter = CQL.toFilter(cqlFilter);
    } else {
        if (searchPhrase == null) {
            searchPhrase = "*";
        }
        if (caseSensitive) {
            filter =
                    getFilterBuilder()
                            .attribute(Metacard.ANY_TEXT)
                            .is()
                            .like()
                            .caseSensitiveText(searchPhrase);
        } else {
            filter = getFilterBuilder().attribute(Metacard.ANY_TEXT).is().like().text(searchPhrase);
        }
    }

    QueryImpl query = new QueryImpl(filter);
    query.setRequestsTotalResultsCount(true);
    if (numberOfItems > -1) {
        query.setPageSize(numberOfItems);
    }

    long start = System.currentTimeMillis();
    SourceResponse response = catalogProvider.query(new QueryRequestImpl(query));
    long end = System.currentTimeMillis();

    int size = 0;
    if (response.getResults() != null) {
        size = response.getResults().size();
    }

    console.println();
    console.printf(
            " %d result(s) out of %s%d%s in %3.3f seconds",
            (size),
            Ansi.ansi().fg(Ansi.Color.CYAN).toString(),
            response.getHits(),
            Ansi.ansi().reset().toString(),
            (end - start) / MILLISECONDS_PER_SECOND);
    console.printf(formatString, "", "", "", "");
    printHeaderMessage(String.format(formatString, ID, DATE, TITLE, EXCERPT));

    for (Result result : response.getResults()) {
        Metacard metacard = result.getMetacard();
        String title = (metacard.getTitle() != null ? metacard.getTitle() : "N/A");
        String excerpt = "N/A";
        String modifiedDate = "";

        if (searchPhrase != null) {
            if (metacard.getMetadata() != null) {
                XPathHelper helper = new XPathHelper(metacard.getMetadata());
                String indexedText = helper.getDocument().getDocumentElement().getTextContent();
                indexedText = indexedText.replaceAll("\\r\\n|\\r|\\n", " ");

                String normalizedSearchPhrase = searchPhrase.replaceAll("\\*", "");

                int index = -1;
                if (caseSensitive) {
                    index = indexedText.indexOf(normalizedSearchPhrase);
                } else {
                    index = indexedText.toLowerCase().indexOf(normalizedSearchPhrase.toLowerCase());
                }

                if (index != -1) {
                    int contextLength =
                            (EXCERPT_MAX_LENGTH - normalizedSearchPhrase.length() - 8) / 2;
                    excerpt = "..." + indexedText.substring(Math.max(index - contextLength, 0), index);
                    excerpt = excerpt + Ansi.ansi().fg(Ansi.Color.GREEN).toString();
                    excerpt =
                            excerpt
                                    + indexedText.substring(index, index + normalizedSearchPhrase.length());
                    excerpt = excerpt + Ansi.ansi().reset().toString();
                    excerpt =
                            excerpt
                                    + indexedText.substring(
                                            index + normalizedSearchPhrase.length(),
                                            Math.min(
                                                    indexedText.length(),
                                                    index + normalizedSearchPhrase.length() + contextLength))
                                    + "...";
                }
            }
        }

        if (metacard.getModifiedDate() != null) {
            modifiedDate =
                    new DateTime(metacard.getModifiedDate().getTime()).toString(DATETIME_FORMATTER);
        }

        console.printf(
                formatString,
                metacard.getId(),
                modifiedDate,
                title.substring(0, Math.min(title.length(), TITLE_MAX_LENGTH)),
                excerpt);
    }

    return null;
}
/** Populate the JSON object with related features. */
private JSONObject populateWithRelatedFeatures(
        JSONObject j, SimpleFeature feature, SimpleFeatureType ft, ApplicationLayer al, int index)
        throws Exception {
    if (ft.hasRelations()) {
        JSONArray related_featuretypes = new JSONArray();
        for (FeatureTypeRelation rel : ft.getRelations()) {
            boolean isJoin = rel.getType().equals(FeatureTypeRelation.JOIN);
            if (isJoin) {
                FeatureSource foreignFs =
                        rel.getForeignFeatureType().openGeoToolsFeatureSource(TIMEOUT);
                FeatureIterator<SimpleFeature> foreignIt = null;
                try {
                    Query foreignQ = new Query(foreignFs.getName().toString());
                    // create filter
                    Filter filter = createFilter(feature, rel);
                    if (filter == null) {
                        continue;
                    }
                    // if join only get 1 feature
                    foreignQ.setMaxFeatures(1);
                    foreignQ.setFilter(filter);
                    // set propertynames
                    List<String> propertyNames;
                    if (al != null) {
                        propertyNames = setPropertyNames(al, foreignQ, rel.getForeignFeatureType(), edit);
                    } else {
                        propertyNames = new ArrayList<String>();
                        for (AttributeDescriptor ad : rel.getForeignFeatureType().getAttributes()) {
                            propertyNames.add(ad.getName());
                        }
                    }
                    if (propertyNames.isEmpty()) {
                        // if there are no properties to retrieve just get out
                        continue;
                    }
                    // get aliases
                    Map<String, String> attributeAliases = new HashMap<String, String>();
                    if (!edit) {
                        for (AttributeDescriptor ad : rel.getForeignFeatureType().getAttributes()) {
                            if (ad.getAlias() != null) {
                                attributeAliases.put(ad.getName(), ad.getAlias());
                            }
                        }
                    }
                    // Get Feature and populate JSON object with the values.
                    foreignIt = foreignFs.getFeatures(foreignQ).features();
                    while (foreignIt.hasNext()) {
                        SimpleFeature foreignFeature = foreignIt.next();
                        // join it in the same json
                        j =
                                toJSONFeature(
                                        j,
                                        foreignFeature,
                                        rel.getForeignFeatureType(),
                                        al,
                                        propertyNames,
                                        attributeAliases,
                                        index);
                    }
                } finally {
                    if (foreignIt != null) {
                        foreignIt.close();
                    }
                    foreignFs.getDataStore().dispose();
                }
            } else {
                Filter filter = createFilter(feature, rel);
                if (filter == null) {
                    continue;
                }
                JSONObject related_ft = new JSONObject();
                related_ft.put("filter", CQL.toCQL(filter));
                related_ft.put("id", rel.getForeignFeatureType().getId());
                related_ft.put("foreignFeatureTypeName", rel.getForeignFeatureType().getTypeName());
                related_featuretypes.put(related_ft);
            }
        }
        if (related_featuretypes.length() > 0) {
            j.put("related_featuretypes", related_featuretypes);
        }
    }
    return j;
}
@Override
protected Object executeWithSubject() throws Exception {
    List<CatalogProvider> providers = getCatalogProviders();

    if (providers.isEmpty() || providers.size() < 2) {
        console.println("Not enough CatalogProviders installed to migrate");
        return null;
    }

    console.println("The \"FROM\" provider is: " + providers.get(0).getClass().getSimpleName());
    CatalogProvider provider = providers.get(1);
    console.println("The \"TO\" provider is: " + provider.getClass().getSimpleName());

    String answer = getInput("Do you wish to continue? (yes/no)");
    if (!"yes".equalsIgnoreCase(answer)) {
        console.println();
        console.println("Now exiting...");
        console.flush();
        return null;
    }

    ingestProvider = new Provider(provider);
    framework = getCatalog();

    start = System.currentTimeMillis();

    final Filter filter =
            (cqlFilter != null)
                    ? CQL.toFilter(cqlFilter)
                    : getFilter(getFilterStartTime(start), start, Metacard.MODIFIED);

    QueryImpl query = new QueryImpl(filter);
    query.setRequestsTotalResultsCount(true);
    query.setPageSize(batchSize);
    query.setSortBy(new SortByImpl(Metacard.MODIFIED, SortOrder.DESCENDING));
    QueryRequest queryRequest = new QueryRequestImpl(query);

    SourceResponse response;
    try {
        response = framework.query(queryRequest);
    } catch (FederationException e) {
        printErrorMessage("Error occurred while querying the Framework." + e.getMessage());
        return null;
    } catch (SourceUnavailableException e) {
        printErrorMessage("Error occurred while querying the Framework." + e.getMessage());
        return null;
    } catch (UnsupportedQueryException e) {
        printErrorMessage("Error occurred while querying the Framework." + e.getMessage());
        return null;
    }

    final long totalHits = response.getHits();
    final long totalPossible;
    if (totalHits == 0) {
        console.println("No records were found to replicate.");
        return null;
    }

    // If the maxMetacards is set, restrict the totalPossible to the number of maxMetacards
    if (maxMetacards > 0 && maxMetacards <= totalHits) {
        totalPossible = maxMetacards;
    } else {
        totalPossible = totalHits;
    }

    console.println("Starting migration for " + totalPossible + " Records");

    if (multithreaded > 1 && totalPossible > batchSize) {
        BlockingQueue<Runnable> blockingQueue = new ArrayBlockingQueue<Runnable>(multithreaded);
        RejectedExecutionHandler rejectedExecutionHandler = new ThreadPoolExecutor.CallerRunsPolicy();
        final ExecutorService executorService =
                new ThreadPoolExecutor(
                        multithreaded,
                        multithreaded,
                        0L,
                        TimeUnit.MILLISECONDS,
                        blockingQueue,
                        rejectedExecutionHandler);
        console.printf("Running %d threads during replication.%n", multithreaded);

        do {
            LOGGER.debug("In loop at iteration {}", queryIndex.get());
            executorService.submit(
                    () -> {
                        int count = queryAndIngest(framework, ingestProvider, queryIndex.get(), filter);
                        printProgressAndFlush(start, totalPossible, ingestCount.addAndGet(count));
                    });
        } while (queryIndex.addAndGet(batchSize) <= totalPossible);

        executorService.shutdown();

        while (!executorService.isTerminated()) {
            try {
                TimeUnit.SECONDS.sleep(1);
            } catch (InterruptedException e) {
                // ignore
            }
        }
    } else {
        do {
            int count = queryAndIngest(framework, ingestProvider, queryIndex.get(), filter);
            printProgressAndFlush(start, totalPossible, ingestCount.addAndGet(count));
        } while (queryIndex.addAndGet(batchSize) <= totalPossible);
    }

    console.println();

    long end = System.currentTimeMillis();

    String completed =
            String.format(
                    " %d record(s) replicated; %d record(s) failed; completed in %3.3f seconds.",
                    ingestCount.get(), failedCount.get(), (end - start) / MS_PER_SECOND);
    LOGGER.info("Replication Complete: {}", completed);
    console.println(completed);

    return null;
}
@Override
// Returned Map will have suffixes in the key names - client is responsible for handling them
public List<Map<String, Object>> get(String type, String cql) throws PersistenceException {
    if (StringUtils.isBlank(type)) {
        throw new PersistenceException(
                "The type of object(s) to retrieve must be non-null and not blank, e.g., notification, metacard, etc.");
    }

    List<Map<String, Object>> results = new ArrayList<>();

    // Set Solr Core name to type and create/connect to Solr Core
    SolrServer coreSolrServer = getSolrCore(type);
    if (coreSolrServer == null) {
        return results;
    }

    SolrQueryFilterVisitor visitor = new SolrQueryFilterVisitor(coreSolrServer, type);

    try {
        SolrQuery solrQuery;
        // If no cql is specified, then return all items
        if (StringUtils.isBlank(cql)) {
            solrQuery = new SolrQuery("*:*");
        } else {
            Filter filter = CQL.toFilter(cql);
            solrQuery = (SolrQuery) filter.accept(visitor, null);
        }

        QueryResponse solrResponse = coreSolrServer.query(solrQuery, METHOD.POST);
        long numResults = solrResponse.getResults().getNumFound();
        LOGGER.debug("numResults = {}", numResults);

        SolrDocumentList docs = solrResponse.getResults();
        for (SolrDocument doc : docs) {
            PersistentItem result = new PersistentItem();
            Collection<String> fieldNames = doc.getFieldNames();
            for (String name : fieldNames) {
                LOGGER.debug("field name = {} has value = {}", name, doc.getFieldValue(name));
                if (name.endsWith(PersistentItem.TEXT_SUFFIX) && doc.getFieldValues(name).size() > 1) {
                    result.addProperty(
                            name,
                            doc.getFieldValues(name)
                                    .stream()
                                    .filter(s -> s instanceof String)
                                    .map(s -> (String) s)
                                    .collect(Collectors.toSet()));
                } else if (name.endsWith(PersistentItem.XML_SUFFIX)) {
                    result.addXmlProperty(name, (String) doc.getFirstValue(name));
                } else if (name.endsWith(PersistentItem.TEXT_SUFFIX)) {
                    result.addProperty(name, (String) doc.getFirstValue(name));
                } else if (name.endsWith(PersistentItem.LONG_SUFFIX)) {
                    result.addProperty(name, (Long) doc.getFirstValue(name));
                } else if (name.endsWith(PersistentItem.INT_SUFFIX)) {
                    result.addProperty(name, (Integer) doc.getFirstValue(name));
                } else if (name.endsWith(PersistentItem.DATE_SUFFIX)) {
                    result.addProperty(name, (Date) doc.getFirstValue(name));
                } else {
                    LOGGER.info("Not adding field {} because it has invalid suffix", name);
                }
            }
            results.add(result);
        }
    } catch (CQLException e) {
        throw new PersistenceException(
                "CQLException while getting Solr data with cql statement " + cql, e);
    } catch (SolrServerException e) {
        throw new PersistenceException(
                "SolrServerException while getting Solr data with cql statement " + cql, e);
    }

    return results;
}