Example no. 1
  /**
   * Filters the rules of <code>featureTypeStyle</code> returning only those that apply to <code>
   * feature</code>.
   *
   * <p>This method returns rules for which:
   *
   * <ol>
   *   <li><code>rule.getFilter()</code> matches <code>feature</code>, or:
   *   <li>the rule defines an "ElseFilter", and the feature matches no other rules.
   * </ol>
   *
   * This method returns an empty array if no rules match.
   *
   * @param featureTypeStyle The feature type style containing the rules.
   * @param feature The feature being filtered against.
   * @param scaleDenominator The scale denominator used to check each rule's scale range.
   */
  public static Rule[] filterRules(
      FeatureTypeStyle featureTypeStyle, SimpleFeature feature, double scaleDenominator) {
    Rule[] rules = featureTypeStyle.getRules();

    if ((rules == null) || (rules.length == 0)) {
      return new Rule[0];
    }

    ArrayList filtered = new ArrayList(rules.length);

    // process the rules, keep track of the need to apply an else filter
    boolean match = false;
    boolean hasElseFilter = false;

    for (int i = 0; i < rules.length; i++) {
      Rule rule = rules[i];
      LOGGER.finer(new StringBuffer("Applying rule: ").append(rule.toString()).toString());

      // does this rule have an else filter
      if (rule.hasElseFilter()) {
        hasElseFilter = true;

        continue;
      }

      // is this rule within scale?
      if (!isWithInScale(rule, scaleDenominator)) {
        continue;
      }

      // does this rule have a filter which applies to the feature
      Filter filter = rule.getFilter();

      if ((filter == null) || filter.evaluate(feature)) {
        match = true;

        filtered.add(rule);
      }
    }

    // if no rules matched the feature, re-run through the rules applying
    // any else filters
    if (!match && hasElseFilter) {
      // loop through again and apply all the else rules
      for (int i = 0; i < rules.length; i++) {
        Rule rule = rules[i];

        // is this rule within scale?
        if (!isWithInScale(rule, scaleDenominator)) {
          continue;
        }

        if (rule.hasElseFilter()) {
          filtered.add(rule);
        }
      }
    }

    return (Rule[]) filtered.toArray(new Rule[filtered.size()]);
  }
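
  // Usage sketch, not part of the original class: 'fts' and 'feature' are assumed to be an
  // existing FeatureTypeStyle and SimpleFeature, and 25000 is an illustrative scale denominator.
  // Rules whose filter matches the feature come back directly; else-filter rules come back only
  // when nothing else matched, as described in the Javadoc above.
  private static void exampleFilterRulesUsage(FeatureTypeStyle fts, SimpleFeature feature) {
    Rule[] applicable = filterRules(fts, feature, 25000d);
    for (Rule rule : applicable) {
      // every returned rule is within scale range and applies to this feature
      System.out.println("applicable rule: " + rule.getName());
    }
  }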
  /**
   * Encodes an SDO_RELATE predicate.
   *
   * @param filter the spatial filter being encoded
   * @param e1 the first operand expression
   * @param e2 the second operand expression
   * @param swapped whether the operands were swapped relative to the original filter
   * @param extraData extra data passed through to the expression encoding
   */
  protected void doSDORelate(
      Filter filter, Expression e1, Expression e2, boolean swapped, Object extraData)
      throws IOException {
    // grab the operating mask
    String mask = null;
    for (Class filterClass : SDO_RELATE_MASK_MAP.keySet()) {
      if (filterClass.isAssignableFrom(filter.getClass()))
        mask = SDO_RELATE_MASK_MAP.get(filterClass);
    }
    if (mask == null)
      throw new IllegalArgumentException(
          "Cannot encode filter " + filter.getClass() + " into a SDO_RELATE");
    if (swapped) mask = INVERSE_OPERATOR_MAP.get(mask);

    // ok, ready to write out the SDO_RELATE
    out.write("SDO_RELATE(");
    e1.accept(this, extraData);
    out.write(", ");
    e2.accept(this, extraData);
    // for disjoint we ask for no interaction, anyinteract == false
    if (filter instanceof Disjoint) {
      out.write(", 'mask=ANYINTERACT querytype=WINDOW') <> 'TRUE' ");
    } else {
      out.write(", 'mask=" + mask + " querytype=WINDOW') = 'TRUE' ");
    }
  }
  protected List<Feature> setNextFeature(String fId, List<Object> foreignIdValues)
      throws IOException {
    List<Feature> features = new ArrayList<Feature>();
    features.add(curSrcFeature);
    curSrcFeature = null;

    while (getSourceFeatureIterator().hasNext()) {
      Feature next = getSourceFeatureIterator().next();
      if (extractIdForFeature(next).equals(fId) && checkForeignIdValues(foreignIdValues, next)) {
        // HACK HACK HACK
        // evaluate filter that applies to this list as we want a subset
        // instead of full result
        // this is a temporary solution for Bureau of Meteorology
        // requirement for timePositionList
        if (listFilter != null) {
          if (listFilter.evaluate(next)) {
            features.add(next);
          }
          // END OF HACK
        } else {
          features.add(next);
        }
        // HACK HACK HACK
        // evaluate filter that applies to this list as we want a subset
        // instead of full result
        // this is a temporary solution for Bureau of Meteorology
        // requirement for timePositionList
      } else if (listFilter == null || listFilter.evaluate(next)) {
        // END OF HACK
        curSrcFeature = next;
        break;
      }
    }
    return features;
  }
Example no. 4
  @Override
  public Query adaptQuery(Query query) {
    Filter filter = query.getFilter();
    if (filter != null && !Filter.INCLUDE.equals(filter)) {
      Filter qualified = (Filter) filter.accept(NSS_QUALIFIER, null);
      Filter extended = (Filter) qualified.accept(PATH_EXTENDER, null);
      query.setFilter(extended);
    }

    SortBy[] sortBy = query.getSortBy();
    if (sortBy != null && sortBy.length > 0) {
      CSWPropertyPathExtender extender = new CSWPropertyPathExtender();
      for (int i = 0; i < sortBy.length; i++) {
        SortBy sb = sortBy[i];
        if (!SortBy.NATURAL_ORDER.equals(sb) && !SortBy.REVERSE_ORDER.equals(sb)) {
          PropertyName name = sb.getPropertyName();
          PropertyName extended = extender.extendProperty(name, FF, NAMESPACES);
          sortBy[i] = new SortByImpl(extended, sb.getSortOrder());
        }
      }
      query.setSortBy(sortBy);
    }

    return query;
  }
  @Override
  public Object visit(Or filter, Object extraData) {
    // scan, clone and simplify the children
    List<Filter> newChildren = new ArrayList<Filter>(filter.getChildren().size());
    for (Filter child : filter.getChildren()) {
      Filter cloned = (Filter) child.accept(this, extraData);

      // if any of the child filters is include,
      // the whole chain of OR is equivalent to
      // INCLUDE
      if (cloned == Filter.INCLUDE) return Filter.INCLUDE;

      // these can be skipped
      if (cloned == Filter.EXCLUDE) continue;

      newChildren.add(cloned);
    }

    // we might end up with an empty list
    if (newChildren.size() == 0) return Filter.EXCLUDE;

    // remove the logic operator if we have only one filter left
    if (newChildren.size() == 1) return newChildren.get(0);

    // otherwise return the OR of the cloned and simplified children
    return getFactory(extraData).or(newChildren);
  }
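
  // A minimal sketch of the simplification rules documented in the comments above, assuming this
  // visitor is (or behaves like) GeoTools' SimplifyingFilterVisitor; the attribute name is
  // illustrative.
  private static Filter exampleSimplifyOr() {
    FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
    Filter comparison = ff.equal(ff.property("name"), ff.literal("a"), true);
    Filter or = ff.or(Arrays.asList(Filter.EXCLUDE, comparison));
    // EXCLUDE children are dropped and a single surviving child is returned unwrapped,
    // so the simplified result is just the PropertyIsEqualTo comparison
    return (Filter) or.accept(new SimplifyingFilterVisitor(), null);
  }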
 /** {@inheritDoc } */
 @Override
 public boolean evaluate(final Object object) {
   for (Filter filter : filterArray) {
     if (!filter.evaluate(object)) {
       return false;
     }
   }
   return true;
 }
  /**
   * There might be multiple mappings per property name, like <code>gml:name[1] = att1</code>, <code>
   * gml:name[2] = strConcat(att2, att3)</code>, <code>gml:name[3] = "sampleValue"</code>.
   *
   * <p>In the BoreHole test mapping used here, the following mappings exist for gml:name:
   *
   * <ul>
   *   <li>gml:name[1] = strConcat( strConcat(QS, strConcat("/", RT)), strConcat(strConcat("/",
   *       NUMB), strConcat("/", BSUFF)) )
   *   <li>gml:name[2] = BGS_ID
   *   <li>gml:name[3] = NAME
   *   <li>gml:name[4] = ORIGINAL_N
   * </ul>
   *
   * This means the "unrolled" filter for <code>gml:name = "SWADLINCOTE"</code> should be <code>
   * strConcat( strConcat(QS, ...) = "SWADLINCOTE"
   *          OR BGS_ID = "SWADLINCOTE"
   *          OR NAME = "SWADLINCOTE"
   *          OR ORIGINAL_N = "SWADLINCOTE"</code>
   *
   * <p>
   *
   * @throws Exception
   */
  @Test
  public void testCompareFilterMultipleMappingsPerPropertyName() throws Exception {
    final String XMMLNS = "http://www.opengis.net/xmml";
    final Name typeName = new NameImpl(XMMLNS, "Borehole");

    AppSchemaDataAccess complexDs = (AppSchemaDataAccess) mappingDataStore;
    mapping = complexDs.getMappingByElement(typeName);

    NamespaceSupport namespaces = new NamespaceSupport();
    namespaces.declarePrefix("gml", GML.NAMESPACE);
    namespaces.declarePrefix("xmml", XMMLNS);

    FilterFactory2 ff = new FilterFactoryImplNamespaceAware(namespaces);
    PropertyIsEqualTo complexFilter = ff.equals(ff.property("gml:name"), ff.literal("SWADLINCOTE"));

    visitor = new UnmappingFilterVisitor(mapping);

    Filter unrolled = (Filter) complexFilter.accept(visitor, null);
    assertNotNull(unrolled);
    assertNotSame(complexFilter, unrolled);

    assertTrue(unrolled.getClass().getName(), unrolled instanceof org.opengis.filter.Or);

    Or oredFilter = (Or) unrolled;
    List children = oredFilter.getChildren();
    assertEquals(4, children.size());

    assertTrue(children.get(0) instanceof PropertyIsEqualTo);
    assertTrue(children.get(1) instanceof PropertyIsEqualTo);
    assertTrue(children.get(2) instanceof PropertyIsEqualTo);
    assertTrue(children.get(3) instanceof PropertyIsEqualTo);

    PropertyIsEqualTo filter1 = (PropertyIsEqualTo) children.get(0);
    PropertyIsEqualTo filter2 = (PropertyIsEqualTo) children.get(1);
    PropertyIsEqualTo filter3 = (PropertyIsEqualTo) children.get(2);
    PropertyIsEqualTo filter4 = (PropertyIsEqualTo) children.get(3);

    assertTrue(filter1.getExpression1() instanceof Function);
    assertTrue(filter2.getExpression1() instanceof PropertyName);
    assertTrue(filter3.getExpression1() instanceof PropertyName);
    assertTrue(filter4.getExpression1() instanceof PropertyName);

    assertTrue(filter1.getExpression2() instanceof Literal);
    assertTrue(filter2.getExpression2() instanceof Literal);
    assertTrue(filter3.getExpression2() instanceof Literal);
    assertTrue(filter4.getExpression2() instanceof Literal);

    assertEquals("BGS_ID", ((PropertyName) filter2.getExpression1()).getPropertyName());
    assertEquals("NAME", ((PropertyName) filter3.getExpression1()).getPropertyName());
    assertEquals("ORIGINAL_N", ((PropertyName) filter4.getExpression1()).getPropertyName());
  }
Example no. 8
  /**
   * Generates the ECQL predicates associated with the {@link List} of {@link Filter} objects.
   *
   * @param filterList the filters to encode
   * @return ECQL predicates separated by ";"
   */
  public static String toCQL(List<Filter> filterList) {
    FilterToECQL toECQL = new FilterToECQL();

    StringBuilder output = new StringBuilder();
    Iterator<Filter> iter = filterList.iterator();
    while (iter.hasNext()) {
      Filter filter = iter.next();
      filter.accept(toECQL, output);
      if (iter.hasNext()) {
        output.append("; ");
      }
    }
    return output.toString();
  }
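
  // Usage sketch with illustrative attribute names: ECQL text is parsed into Filters and then
  // encoded back as predicates separated by "; ".
  private static String exampleToCQLList() throws CQLException {
    List<Filter> filters =
        Arrays.asList(ECQL.toFilter("DEPTH > 100"), ECQL.toFilter("NAME LIKE 'A%'"));
    return toCQL(filters); // e.g. "DEPTH > 100; NAME LIKE 'A%'"
  }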
Example no. 9
 public static Filter reformatFilter(Filter filter, SimpleFeatureType ft) throws Exception {
   FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
   if (Filter.INCLUDE.equals(filter) || Filter.EXCLUDE.equals(filter)) {
     return filter;
   }
   for (FeatureTypeRelation rel : ft.getRelations()) {
     if (FeatureTypeRelation.JOIN.equals(rel.getType())) {
       filter = reformatFilter(filter, rel.getForeignFeatureType());
       filter = (Filter) filter.accept(new ValidFilterExtractor(rel), filter);
     }
   }
   filter = (Filter) filter.accept(new SimplifyingFilterVisitor(), null);
   return filter;
 }
Example no. 10
  private static Filter joinFilters(Filter... filters) {
    if (filters == null || filters.length == 0) {
      return Filter.EXCLUDE;
    }

    Filter result;
    if (filters.length == 1) {
      result = filters[0];
    } else {
      FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
      result = ff.and(Arrays.asList(filters));
    }

    SimplifyingFilterVisitor visitor = new SimplifyingFilterVisitor();
    return (Filter) result.accept(visitor, null);
  }
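
  // Usage sketch (the attribute name is illustrative): both conditions must hold, and the
  // SimplifyingFilterVisitor collapses trivial cases such as a redundant Filter.INCLUDE operand.
  private static Filter exampleJoinFilters() {
    FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2();
    Filter depth = ff.greater(ff.property("DEPTH"), ff.literal(10));
    // INCLUDE is simplified away, leaving just the DEPTH > 10 comparison
    return joinFilters(depth, Filter.INCLUDE);
  }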
Example no. 11
  /**
   * Generates the ECQL predicate associated with the {@link Filter} object.
   *
   * @param filter the filter to encode
   * @return ECQL predicate
   */
  public static String toCQL(Filter filter) {
    FilterToECQL toCQL = new FilterToECQL();

    StringBuilder output = (StringBuilder) filter.accept(toCQL, new StringBuilder());

    return output.toString();
  }
Example no. 12
 /**
  * @see org.geotools.data.AbstractDataStore#getCount(java.lang.String, org.geotools.data.Query)
  */
 protected int getCount(Query query) throws IOException {
   String featureTypeName = query.getTypeName();
   if (!featureType.getTypeName().equals(featureTypeName)) {
     throw new SchemaNotFoundException(featureTypeName);
   }
   int count = 0;
   FeatureIterator<SimpleFeature> iterator = collection.features();
   try {
     Filter filter = query.getFilter();
     while (iterator.hasNext() && (count < query.getMaxFeatures())) {
       if (filter.evaluate(iterator.next())) {
         count++;
       }
     }
   } finally {
     iterator.close();
   }
   return count;
 }
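
 // Usage sketch (the attribute name is illustrative): counts the in-memory features matching an
 // ECQL filter against this store's single feature type, honoring the query's max feature limit.
 protected int exampleGetCount() throws Exception {
   Query query = new Query(featureType.getTypeName(), ECQL.toFilter("POPULATION > 1000000"));
   return getCount(query);
 }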
  @Override
  public boolean hasNext() {
    boolean exists = !isNextSourceFeatureNull();

    if (!isHasNextCalled()) {
      if (featureCounter < maxFeatures) {
        if (!exists && getSourceFeatureIterator() != null && getSourceFeatureIterator().hasNext()) {
          this.curSrcFeature = getSourceFeatureIterator().next();
          exists = true;
        }
        if (exists && filteredFeatures != null) {
          // get the next one if this row has already been added to the target
          // feature from setNextFilteredFeature
          while (exists && filteredFeatures.contains(extractIdForFeature(this.curSrcFeature))) {
            if (getSourceFeatureIterator() != null && getSourceFeatureIterator().hasNext()) {
              this.curSrcFeature = getSourceFeatureIterator().next();
              exists = true;
            } else {
              exists = false;
            }
          }
        }
        // HACK HACK HACK
        // evaluate filter that applies to this list as we want a subset
        // instead of full result
        // this is a temporary solution for Bureau of Meteorology
        // requirement for timePositionList
        if (listFilter != null) {
          while (exists && !listFilter.evaluate(curSrcFeature)) {
            // only add to subset if filter matches value
            if (getSourceFeatureIterator() != null && getSourceFeatureIterator().hasNext()) {
              this.curSrcFeature = getSourceFeatureIterator().next();
              exists = true;
            } else {
              exists = false;
            }
          }
        }
        // END OF HACK
      } else {
        exists = false;
      }
    }

    if (!exists) {
      LOGGER.finest("no more features, produced " + featureCounter);
      close();
      curSrcFeature = null;
    }

    setHasNextCalled(true);

    return exists;
  }
Example no. 14
 private Object buildBinaryLogicalOperator(
     final String operator, FilterVisitor visitor, BinaryLogicOperator filter, Object extraData) {
   StringWriter output = asStringWriter(extraData);
   List<Filter> children = filter.getChildren();
   if (children != null) {
     for (Iterator<Filter> i = children.iterator(); i.hasNext(); ) {
       Filter child = i.next();
       if (child instanceof BinaryLogicOperator) {
         output.append("(");
       }
       child.accept(visitor, output);
       if (child instanceof BinaryLogicOperator) {
         output.append(")");
       }
       if (i.hasNext()) {
         output.append(" ").append(operator).append(" ");
       }
     }
   }
   return output;
 }
Example no. 15
 /**
  * Performs the encoding, sending the encoded SOLR string to the configured writer.
  *
  * @param filter the Filter to be encoded.
  * @throws Exception if there were I/O problems or the filter is not fully supported
  */
 public void encode(Filter filter) throws Exception {
   if (out == null) throw new Exception("Can't encode to a null writer.");
   if (getCapabilities().fullySupports(filter)) {
     try {
       filter.accept(this, out);
     } catch (Exception ioe) {
       LOGGER.warning("Unable to export filter" + ioe);
       throw new Exception("Problem writing filter: ", ioe);
     }
   } else {
     throw new Exception("Filter type not supported");
   }
 }
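
 // Usage sketch: parse an ECQL expression (the attribute name is illustrative) and run it through
 // this encoder; filters that the capabilities do not fully support make encode(...) throw, as
 // implemented above.
 public void exampleEncodeUsage() throws Exception {
   encode(ECQL.toFilter("status = 'active'"));
 }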
Example no. 16
    /**
     * Given the <code>Filter</code> passed to the constructor, unpacks it into three different
     * filters: one for the supported SQL based filter, another for the supported Geometry based
     * filter, and the last one for the unsupported filter. Each of them can be retrieved from its
     * corresponding getter.
     */
    private void createGeotoolsFilters() {
      FilterToSQLSDE sqlEncoder = getSqlEncoder();

      PostPreProcessFilterSplittingVisitor unpacker =
          new PostPreProcessFilterSplittingVisitor(sqlEncoder.getCapabilities(), featureType, null);
      sourceFilter.accept(unpacker, null);

      SimplifyingFilterVisitor filterSimplifier = new SimplifyingFilterVisitor();
      final String typeName = this.featureType.getTypeName();
      FIDValidator validator = new SimplifyingFilterVisitor.TypeNameDotNumberFidValidator(typeName);
      filterSimplifier.setFIDValidator(validator);

      this._sqlFilter = unpacker.getFilterPre();
      this._sqlFilter = (Filter) this._sqlFilter.accept(filterSimplifier, null);

      if (LOGGER.isLoggable(Level.FINE) && _sqlFilter != null)
        LOGGER.fine("SQL portion of SDE Query: '" + _sqlFilter + "'");

      Filter remainingFilter = unpacker.getFilterPost();

      unpacker =
          new PostPreProcessFilterSplittingVisitor(
              GeometryEncoderSDE.getCapabilities(), featureType, null);
      remainingFilter.accept(unpacker, null);

      this.geometryFilter = unpacker.getFilterPre();
      this.geometryFilter = (Filter) this.geometryFilter.accept(filterSimplifier, null);
      if (LOGGER.isLoggable(Level.FINE) && geometryFilter != null)
        LOGGER.fine("Spatial-Filter portion of SDE Query: '" + geometryFilter + "'");

      this.unsupportedFilter = unpacker.getFilterPost();
      this.unsupportedFilter = (Filter) this.unsupportedFilter.accept(filterSimplifier, null);
      if (LOGGER.isLoggable(Level.FINE) && unsupportedFilter != null)
        LOGGER.fine(
            "Unsupported (and therefore ignored) portion of SDE Query: '"
                + unsupportedFilter
                + "'");
    }
Example no. 17
  public static List<SimpleFeature> fromFeatureLayer(String mapName) throws IOException {

    ILayer selectedLayer = ApplicationGIS.getActiveMap().getEditManager().getSelectedLayer();
    FeatureSource<SimpleFeatureType, SimpleFeature> featureSource =
        selectedLayer.getResource(FeatureSource.class, new NullProgressMonitor());
    FeatureCollection<SimpleFeatureType, SimpleFeature> featureCollection = null;
    Filter filter = selectedLayer.getFilter();
    if (filter.equals(Filter.EXCLUDE)) {
      featureCollection = featureSource.getFeatures();
    } else {
      featureCollection = featureSource.getFeatures(filter);
    }

    List<SimpleFeature> featuresList = new ArrayList<SimpleFeature>();
    FeatureIterator<SimpleFeature> featureIterator = featureCollection.features();
    while (featureIterator.hasNext()) {
      SimpleFeature feature = featureIterator.next();
      featuresList.add(feature);
    }
    featureCollection.close(featureIterator);

    return featuresList;
  }
Example no. 18
  private Query reprojectFilter(Query query) throws IOException {
    final Filter originalFilter = query.getFilter() != null ? query.getFilter() : Filter.INCLUDE;
    if (Filter.INCLUDE.equals(originalFilter)) {
      return query;
    }

    final SimpleFeatureType nativeFeatureType = getSchema();
    final GeometryDescriptor geom = nativeFeatureType.getGeometryDescriptor();
    // if no geometry involved, no reprojection needed
    if (geom == null) {
      return query;
    }

    final FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2(null);

    try {
      CoordinateReferenceSystem nativeCRS = geom.getCoordinateReferenceSystem();

      // now we apply a default to all geometries and bbox in the filter
      DefaultCRSFilterVisitor defaultCRSVisitor = new DefaultCRSFilterVisitor(ff, nativeCRS);
      final Filter defaultedFilter = (Filter) originalFilter.accept(defaultCRSVisitor, null);

      // and then we reproject all geometries so that the datastore receives them
      // in the native projection system (or the forced one, if a CRS was forced)
      ReprojectingFilterVisitor reprojectingVisitor =
          new ReprojectingFilterVisitor(ff, nativeFeatureType);
      final Filter reprojectedFilter = (Filter) defaultedFilter.accept(reprojectingVisitor, null);

      Query reprojectedQuery = new Query(query);
      reprojectedQuery.setFilter(reprojectedFilter);
      return reprojectedQuery;
    } catch (Exception e) {
      throw new DataSourceException("Had troubles handling filter reprojection...", e);
    }
  }
Example no. 19
  /**
   * Computes the bounds of the features matching the query filter, honoring the query's maximum
   * feature count.
   *
   * @param query the query whose filter and max features constrain the bounds computation
   */
  protected ReferencedEnvelope getBoundsInternal(Query query) {
    ReferencedEnvelope envelope =
        new ReferencedEnvelope(featureType.getCoordinateReferenceSystem());

    FeatureIterator<SimpleFeature> iterator = collection.features();
    try {
      if (iterator.hasNext()) {
        int count = 1;
        Filter filter = query.getFilter();

        while (iterator.hasNext() && (count < query.getMaxFeatures())) {
          SimpleFeature feature = iterator.next();
          if (filter.evaluate(feature)) {
            count++;
            envelope.expandToInclude(
                ((Geometry) feature.getDefaultGeometry()).getEnvelopeInternal());
          }
        }
      }
    } finally {
      iterator.close();
    }
    return envelope;
  }
Example no. 20
  private static List<String> getQueryColumns(
      final String[] queryProperties,
      final Filter unsupportedFilter,
      final SimpleFeatureType fullSchema)
      throws DataSourceException {
    final List<String> columnNames = new ArrayList<String>();

    if ((queryProperties == null) || (queryProperties.length == 0)) {
      final List<AttributeDescriptor> attNames = fullSchema.getAttributeDescriptors();
      for (Iterator<AttributeDescriptor> it = attNames.iterator(); it.hasNext(); ) {
        AttributeDescriptor att = it.next();
        String attName = att.getLocalName();
        // de-namespace-ify the names
        // REVISIT: this shouldn't be needed!
        if (attName.indexOf(":") != -1) {
          attName = attName.substring(attName.indexOf(":") + 1);
        }
        columnNames.add(attName);
      }
    } else {
      columnNames.addAll(Arrays.asList(queryProperties));

      // If we don't support the full filter natively and it references some properties,
      // those properties have to be retrieved in order to evaluate the filter at runtime
      if (unsupportedFilter != null) {
        final FilterAttributeExtractor attExtractor;
        attExtractor = new FilterAttributeExtractor(fullSchema);
        unsupportedFilter.accept(attExtractor, null);
        final String[] filterAtts = attExtractor.getAttributeNames();
        for (String attName : filterAtts) {
          if (!columnNames.contains(attName)) {
            columnNames.add(attName);
          }
        }
      }
    }

    return columnNames;
  }
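
  // A minimal sketch of the attribute-extraction step used above: FilterAttributeExtractor
  // collects every property name referenced by a filter (the attribute names are illustrative).
  private static String[] exampleExtractFilterAttributes() throws CQLException {
    FilterAttributeExtractor extractor = new FilterAttributeExtractor();
    ECQL.toFilter("DEPTH > 100 AND NAME LIKE 'A%'").accept(extractor, null);
    return extractor.getAttributeNames(); // e.g. ["DEPTH", "NAME"], order not guaranteed
  }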
 @Override
 public String getDataSecurityToken(Authentication user, ResourceInfo resource) {
   Filter accessFilter = getAccessFilter(user);
   return DigestUtils.md5Hex(accessFilter.toString());
 }
  public static IMappingFeatureIterator getInstance(
      AppSchemaDataAccess store, FeatureTypeMapping mapping, Query query, Filter unrolledFilter)
      throws IOException {

    if (mapping instanceof XmlFeatureTypeMapping) {
      return new XmlMappingFeatureIterator(store, mapping, query);
    }

    if (AppSchemaDataAccessConfigurator.isJoining()) {
      if (!(query instanceof JoiningQuery)) {
        query = new JoiningQuery(query);
      }
      FeatureSource mappedSource = mapping.getSource();
      FilterCapabilities capabilities = getFilterCapabilities(mappedSource);

      IMappingFeatureIterator iterator;
      if (unrolledFilter != null) {
        query.setFilter(Filter.INCLUDE);
        Query unrolledQuery = store.unrollQuery(query, mapping);
        unrolledQuery.setFilter(unrolledFilter);
        if (isSimpleType(mapping)) {
          iterator = new MappingAttributeIterator(store, mapping, query, false, unrolledQuery);
        } else {
          iterator =
              new DataAccessMappingFeatureIterator(store, mapping, query, false, unrolledQuery);
        }

      } else {
        Filter filter = query.getFilter();
        ComplexFilterSplitter splitter = new ComplexFilterSplitter(capabilities, mapping);
        filter.accept(splitter, null);

        query.setFilter(splitter.getFilterPre());
        filter = splitter.getFilterPost();
        int maxFeatures = Query.DEFAULT_MAX;
        if (filter != null && filter != Filter.INCLUDE) {
          maxFeatures = query.getMaxFeatures();
          query.setMaxFeatures(Query.DEFAULT_MAX);
        }
        iterator = new DataAccessMappingFeatureIterator(store, mapping, query, false);
        if (filter != null && filter != Filter.INCLUDE) {
          iterator = new PostFilteringMappingFeatureIterator(iterator, filter, maxFeatures);
        }
      }
      return iterator;
    } else {
      if (query.getFilter() != null) {
        Query unrolledQuery = store.unrollQuery(query, mapping);
        Filter filter = unrolledQuery.getFilter();
        CheckIfNestedFilterVisitor visitor = new CheckIfNestedFilterVisitor();
        filter.accept(visitor, null);
        if (visitor.hasNestedAttributes) {
          FeatureSource mappedSource = mapping.getSource();
          if (mappedSource instanceof JDBCFeatureSource
              || mappedSource instanceof JDBCFeatureStore) {
            FilterCapabilities capabilities = getFilterCapabilities(mappedSource);
            ComplexFilterSplitter splitter = new ComplexFilterSplitter(capabilities, mapping);
            filter.accept(splitter, null);
            query.setFilter(splitter.getFilterPre());
            unrolledQuery.setFilter(splitter.getFilterPre());
            filter = splitter.getFilterPost();
          } else {
            // VT: no filtering capabilities because the source may not be of JDBC type,
            // therefore we continue;
            // has nested attribute in the filter expression
            unrolledQuery.setFilter(Filter.INCLUDE);
          }
          return new FilteringMappingFeatureIterator(store, mapping, query, unrolledQuery, filter);
        } else if (!filter.equals(Filter.INCLUDE)
            && !filter.equals(Filter.EXCLUDE)
            && !(filter instanceof FidFilterImpl)) {
          // normal filters
          if (isSimpleType(mapping)) {
            return new MappingAttributeIterator(store, mapping, query, true, unrolledQuery);
          } else {
            return new DataAccessMappingFeatureIterator(store, mapping, query, true, unrolledQuery);
          }
        }
      }

      return new DataAccessMappingFeatureIterator(store, mapping, query, false);
    }
  }
  private void visitLogicOperator(Filter filter) {
    if (original == null) original = filter;

    if (!fcs.supports(Not.class) && !fcs.supports(And.class) && !fcs.supports(Or.class)) {
      postStack.push(filter);
      return;
    }

    int i = postStack.size();
    int j = preStack.size();
    if (filter instanceof Not) {

      if (((Not) filter).getFilter() != null) {
        Filter next = ((Not) filter).getFilter();
        next.accept(this, null);

        if (i < postStack.size()) {
          // since the child filter can be split into both pre and post parts,
          // the parts have to be combined since ~(A^B) == ~A | ~B
          // combining is easy since filter == combined result, however both post and pre stacks
          // must be cleared since both may have components of the filter
          popToSize(postStack, i);
          popToSize(preStack, j);
          postStack.push(filter);
        } else {
          popToSize(preStack, j);
          preStack.push(filter);
        }
      }
    } else {
      if (filter instanceof Or) {
        Filter orReplacement;

        try {
          orReplacement = translateOr((Or) filter);
          orReplacement.accept(this, null);
        } catch (IllegalFilterException e) {
          popToSize(preStack, j);
          postStack.push(filter);
          return;
        }
        if (postStack.size() > i) {
          popToSize(postStack, i);
          postStack.push(filter);

          return;
        }

        preStack.pop();
        preStack.push(filter);
      } else {
        // it's an AND
        Iterator it = ((And) filter).getChildren().iterator();

        while (it.hasNext()) {
          Filter next = (Filter) it.next();
          next.accept(this, null);
        }

        // combine the unsupported and add to the top
        if (i < postStack.size()) {
          if (filter instanceof And) {
            Filter f = (Filter) postStack.pop();

            while (postStack.size() > i) f = ff.and(f, (Filter) postStack.pop());

            postStack.push(f);

            if (j < preStack.size()) {
              f = (Filter) preStack.pop();

              while (preStack.size() > j) f = ff.and(f, (Filter) preStack.pop());
              preStack.push(f);
            }
          } else {
            logger.warning("LogicFilter found which is not 'and, or, not");

            popToSize(postStack, i);
            popToSize(preStack, j);

            postStack.push(filter);
          }
        } else {
          popToSize(preStack, j);
          preStack.push(filter);
        }
      }
    }
  }
Example no. 24
  @Override
  // Returned Map will have suffixes in the key names - client is responsible for handling them
  public List<Map<String, Object>> get(String type, String cql) throws PersistenceException {
    if (StringUtils.isBlank(type)) {
      throw new PersistenceException(
          "The type of object(s) to retrieve must be non-null and not blank, e.g., notification, metacard, etc.");
    }

    List<Map<String, Object>> results = new ArrayList<>();

    // Set Solr Core name to type and create/connect to Solr Core
    SolrServer coreSolrServer = getSolrCore(type);
    if (coreSolrServer == null) {
      return results;
    }

    SolrQueryFilterVisitor visitor = new SolrQueryFilterVisitor(coreSolrServer, type);

    try {
      SolrQuery solrQuery;
      // If no cql is specified, then return all items
      if (StringUtils.isBlank(cql)) {
        solrQuery = new SolrQuery("*:*");
      } else {
        Filter filter = CQL.toFilter(cql);
        solrQuery = (SolrQuery) filter.accept(visitor, null);
      }
      QueryResponse solrResponse = coreSolrServer.query(solrQuery, METHOD.POST);
      long numResults = solrResponse.getResults().getNumFound();
      LOGGER.debug("numResults = {}", numResults);

      SolrDocumentList docs = solrResponse.getResults();
      for (SolrDocument doc : docs) {
        PersistentItem result = new PersistentItem();
        Collection<String> fieldNames = doc.getFieldNames();
        for (String name : fieldNames) {
          LOGGER.debug("field name = {} has value = {}", name, doc.getFieldValue(name));
          if (name.endsWith(PersistentItem.TEXT_SUFFIX) && doc.getFieldValues(name).size() > 1) {
            result.addProperty(
                name,
                doc.getFieldValues(name)
                    .stream()
                    .filter(s -> s instanceof String)
                    .map(s -> (String) s)
                    .collect(Collectors.toSet()));
          } else if (name.endsWith(PersistentItem.XML_SUFFIX)) {
            result.addXmlProperty(name, (String) doc.getFirstValue(name));
          } else if (name.endsWith(PersistentItem.TEXT_SUFFIX)) {
            result.addProperty(name, (String) doc.getFirstValue(name));
          } else if (name.endsWith(PersistentItem.LONG_SUFFIX)) {
            result.addProperty(name, (Long) doc.getFirstValue(name));
          } else if (name.endsWith(PersistentItem.INT_SUFFIX)) {
            result.addProperty(name, (Integer) doc.getFirstValue(name));
          } else if (name.endsWith(PersistentItem.DATE_SUFFIX)) {
            result.addProperty(name, (Date) doc.getFirstValue(name));
          } else {
            LOGGER.info("Not adding field {} because it has invalid suffix", name);
          }
        }
        results.add(result);
      }
    } catch (CQLException e) {
      throw new PersistenceException(
          "CQLException while getting Solr data with cql statement " + cql, e);
    } catch (SolrServerException e) {
      throw new PersistenceException(
          "SolrServerException while getting Solr data with cql statement " + cql, e);
    }

    return results;
  }
Example no. 25
 @Override
 public Object accept(FilterVisitor visitor, Object obj) {
   return filter.accept(visitor, obj);
 }
Example no. 26
  // Jody - Recommend moving to the following
  // when we are ready for CoordinateSystem support
  public FeatureReader<SimpleFeatureType, SimpleFeature> getFeatureReader(
      Query query, Transaction transaction) throws IOException {
    Filter filter = query.getFilter();
    String typeName = query.getTypeName();
    String propertyNames[] = query.getPropertyNames();

    if (filter == null) {
      throw new NullPointerException(
          "getFeatureReader requires Filter: " + "did you mean Filter.INCLUDE?");
    }
    if (typeName == null) {
      throw new NullPointerException(
          "getFeatureReader requires typeName: "
              + "use getTypeNames() for a list of available types");
    }
    if (transaction == null) {
      throw new NullPointerException(
          "getFeatureReader requires Transaction: "
              + "did you mean to use Transaction.AUTO_COMMIT?");
    }
    SimpleFeatureType featureType = getSchema(query.getTypeName());

    if (propertyNames != null || query.getCoordinateSystem() != null) {
      try {
        featureType =
            DataUtilities.createSubType(featureType, propertyNames, query.getCoordinateSystem());
      } catch (SchemaException e) {
        LOGGER.log(Level.FINEST, e.getMessage(), e);
        throw new DataSourceException("Could not create Feature Type for query", e);
      }
    }
    if (filter == Filter.EXCLUDE || filter.equals(Filter.EXCLUDE)) {
      return new EmptyFeatureReader<SimpleFeatureType, SimpleFeature>(featureType);
    }
    // GR: allow subclasses to implement as much filtering as they can,
    // by returning just their unsupported filter
    filter = getUnsupportedFilter(typeName, filter);
    if (filter == null) {
      throw new NullPointerException(
          "getUnsupportedFilter shouldn't return null. Do you mean Filter.INCLUDE?");
    }

    // There are cases where the readers have to lock. Take shapefile for example: getting a
    // Reader causes the file to be locked. However, on a commit, TransactionStateDiff locks
    // before a writer is obtained. In order to prevent deadlocks, either the diff has to be
    // obtained first or the reader has to be obtained first. Because shapefile writes to a
    // buffer first, the actual write lock is not flipped until the transaction has done most
    // of its work. As a result I suggest getting the diff first, then getting the reader.
    // JE
    Diff diff = null;
    if (transaction != Transaction.AUTO_COMMIT) {
      TransactionStateDiff state = state(transaction);
      if (state != null) {
        diff = state.diff(typeName);
      }
    }

    // This calls our subclass "simple" implementation
    // All other functionality will be built as a reader around
    // this class
    //
    FeatureReader<SimpleFeatureType, SimpleFeature> reader = getFeatureReader(typeName, query);

    if (diff != null)
      reader =
          new DiffFeatureReader<SimpleFeatureType, SimpleFeature>(reader, diff, query.getFilter());

    if (!filter.equals(Filter.INCLUDE)) {
      reader = new FilteringFeatureReader<SimpleFeatureType, SimpleFeature>(reader, filter);
    }

    if (!featureType.equals(reader.getFeatureType())) {
      LOGGER.fine("Recasting feature type to subtype by using a ReTypeFeatureReader");
      reader = new ReTypeFeatureReader(reader, featureType, false);
    }

    if (query.getMaxFeatures() != Query.DEFAULT_MAX) {
      reader =
          new MaxFeatureReader<SimpleFeatureType, SimpleFeature>(reader, query.getMaxFeatures());
    }

    return reader;
  }
Example no. 27
 @Override
 public boolean evaluate(Object object) {
   return filter.evaluate(object);
 }
Example no. 28
 @Override
 public Object accept(FilterVisitor visitor, Object extraData) {
   return filter.accept(visitor, extraData);
 }
Example no. 29
  /**
   * Takes two {@link Query}objects and produce a new one by mixing the restrictions of both of
   * them.
   *
   * <p>The policy to mix the queries components is the following:
   *
   * <ul>
   *   <li>typeName: type names MUST match (not checked if one or both queries equal <code>
   *       Query.ALL</code>)
   *   <li>handle: you must provide one since no sensible choice can be done between the handles of
   *       both queries
   *   <li>maxFeatures: the lower of the two maxFeatures values will be used (most restrictive)
   *   <li>attributeNames: the attributes of both queries will be joined in a single set of
   *       attributes. IMPORTANT: only <b><i>explicitly</i></b> requested attributes will be
   *       joined, so, if the method <code>retrieveAllProperties()</code> of one of the queries
   *       returns <code>true</code> it does not mean that all the properties will be joined. You
   *       must create the query with the names of the properties you want to load.
   *   <li>filter: the filters of both queries are and'ed (the result must satisfy both)
   *   <li><b>any other query property is ignored</b> and no guarantees are made of their return
   *       values, so client code shall explicitly care of hints, startIndex, etc., if needed.
   * </ul>
   *
   * @param firstQuery Query against this DataStore
   * @param secondQuery the second Query to mix in
   * @param handle the handle to set on the mixed Query
   * @return Query restricted to the limits of both input queries
   * @throws NullPointerException if some of the queries is null
   * @throws IllegalArgumentException if the type names of both queries do not match
   */
  public static Query mixQueries(Query firstQuery, Query secondQuery, String handle) {
    if ((firstQuery == null) && (secondQuery == null)) {
      // throw new NullPointerException("Cannot combine two null queries");
      return Query.ALL;
    }
    if (firstQuery == null || firstQuery.equals(Query.ALL)) {
      return secondQuery;
    } else if (secondQuery == null || secondQuery.equals(Query.ALL)) {
      return firstQuery;
    }
    if ((firstQuery.getTypeName() != null) && (secondQuery.getTypeName() != null)) {
      if (!firstQuery.getTypeName().equals(secondQuery.getTypeName())) {
        String msg =
            "Type names do not match: "
                + firstQuery.getTypeName()
                + " != "
                + secondQuery.getTypeName();
        throw new IllegalArgumentException(msg);
      }
    }

    // mix versions, if possible
    String version;
    if (firstQuery.getVersion() != null) {
      if (secondQuery.getVersion() != null
          && !secondQuery.getVersion().equals(firstQuery.getVersion()))
        throw new IllegalArgumentException("First and second query refer different versions");
      version = firstQuery.getVersion();
    } else {
      version = secondQuery.getVersion();
    }

    // none of the queries equals Query.ALL, mix them
    // use the more restrictive max features field
    int maxFeatures = Math.min(firstQuery.getMaxFeatures(), secondQuery.getMaxFeatures());

    // join attributes names
    String[] propNames =
        joinAttributes(firstQuery.getPropertyNames(), secondQuery.getPropertyNames());

    // join filters
    Filter filter = firstQuery.getFilter();
    Filter filter2 = secondQuery.getFilter();

    if ((filter == null) || filter.equals(Filter.INCLUDE)) {
      filter = filter2;
    } else if ((filter2 != null) && !filter2.equals(Filter.INCLUDE)) {
      filter = ff.and(filter, filter2);
    }
    Integer start = 0;
    if (firstQuery.getStartIndex() != null) {
      start = firstQuery.getStartIndex();
    }
    if (secondQuery.getStartIndex() != null) {
      start += secondQuery.getStartIndex();
    }
    // collect all hints
    Hints hints = new Hints();
    if (firstQuery.getHints() != null) {
      hints.putAll(firstQuery.getHints());
    }
    if (secondQuery.getHints() != null) {
      hints.putAll(secondQuery.getHints());
    }
    // build the mixed query
    String typeName =
        firstQuery.getTypeName() != null ? firstQuery.getTypeName() : secondQuery.getTypeName();

    Query mixed = new Query(typeName, filter, maxFeatures, propNames, handle);
    mixed.setVersion(version);
    mixed.setHints(hints);
    if (start != 0) {
      mixed.setStartIndex(start);
    }
    return mixed;
  }
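
  // Usage sketch (type, attribute, and handle names are illustrative): mixing a definition query
  // with a per-request query yields the AND of both filters, the smaller maxFeatures value, and
  // the union of the explicitly requested property names.
  private static Query exampleMixQueries() throws CQLException {
    Query definition = new Query("roads", ECQL.toFilter("TYPE = 'highway'"));
    Query request =
        new Query(
            "roads",
            ECQL.toFilter("BBOX(the_geom, 0, 0, 10, 10)"),
            100,
            new String[] {"the_geom", "TYPE"},
            "request");
    return mixQueries(definition, request, "mixed");
  }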
Example no. 30
 /** Transforms the provided filter, resolving property names against the given schema. */
 public static Filter resolvePropertyNames(Filter filter, SimpleFeatureType schema) {
   if (filter == null || filter == Filter.INCLUDE || filter == Filter.EXCLUDE) {
     return filter;
   }
   return (Filter) filter.accept(new PropertyNameResolvingVisitor(schema), null);
 }