/**
   * {@inheritDoc}
   *
   * <p>Polygon layers are queried with an intersects filter against the search position; point
   * and line layers are queried with a bounding-box filter built from the given search radius.
   *
   * @param pos the search position
   * @param params for point or line layers, {@code params[0]} must be a {@code Number} giving the
   *     search radius; ignored for polygon layers
   * @return the features that lie within the search radius of {@code pos}; NOTE(review): when the
   *     layer reference has been garbage collected this method actually returns {@code null},
   *     not the empty collection the previous javadoc promised — confirm which contract callers
   *     rely on before changing either side
   * @throws IOException if the feature source for the layer cannot be accessed
   */
  public FeatureCollection getInfo(DirectPosition2D pos, Object... params) throws IOException {

    FeatureCollection<? extends FeatureType, ? extends Feature> collection = null;
    MapLayer layer = layerRef.get();

    if (layer != null) {
      Filter filter = null;
      if (isPolygonGeometry) {
        /*
         * Polygon features - use an intersects filter
         */
        Geometry posGeom = createSearchPos(pos);
        filter =
            filterFactory.intersects(
                filterFactory.property(attrName), filterFactory.literal(posGeom));

      } else {
        /*
         * Line or point features - use a bounding box filter
         */
        // NOTE(review): params[0] is read unconditionally — an empty params array throws
        // ArrayIndexOutOfBoundsException here; confirm callers always supply the radius
        double radius = ((Number) params[0]).doubleValue();
        ReferencedEnvelope env = createSearchEnv(pos, radius);
        filter = filterFactory.bbox(filterFactory.property(attrName), env);
      }

      // reproject query results into the map context CRS before returning them
      DefaultQuery query = new DefaultQuery(null, filter);
      query.setCoordinateSystemReproject(getMapContext().getCoordinateReferenceSystem());
      collection = layer.getFeatureSource().getFeatures(query);
    }

    return collection;
  }
  /**
   * Returns the features matched by {@code query} as read through the versioned feature
   * collection view, retyped so the returned schema carries this source's type name.
   *
   * @param query the query to run; its type name, if set, must match this source's schema
   * @return the matching features, retyped to this source's type name
   * @throws IOException if the query targets an incompatible type or the wrapped datastore
   *     cannot be accessed
   */
  public FeatureCollection<SimpleFeatureType, SimpleFeature> getVersionedFeatures(Query query)
      throws IOException {
    final SimpleFeatureType ft = getSchema();

    // check the feature type is the right one
    final String typeName = ft.getTypeName();
    if (query.getTypeName() != null && !query.getTypeName().equals(typeName))
      throw new IOException("Incompatible type, this class can access only " + typeName);

    // make sure the versioned feature collection view is around, creating it lazily
    if (!Arrays.asList(store.wrapped.getTypeNames()).contains(store.getVFCViewName(typeName)))
      store.createVersionedFeatureCollectionView(typeName);

    // we have to hit the view, so retarget the query at it and add the versioning predicates
    DefaultQuery vq = new DefaultQuery(query);
    vq.setTypeName(VersionedPostgisDataStore.getVFCViewName(typeName));
    vq = store.buildVersionedQuery(vq);
    FeatureCollection<SimpleFeatureType, SimpleFeature> fc =
        store
            .wrapped
            .getFeatureSource(VersionedPostgisDataStore.getVFCViewName(typeName))
            .getFeatures(vq);

    // build a renamed feature type with the same attributes as the feature collection
    // (fix: the cached schema was previously computed but never used — fc.getSchema()
    // was redundantly called a second time)
    final SimpleFeatureType fcSchema = fc.getSchema();
    SimpleFeatureTypeBuilder builder = new SimpleFeatureTypeBuilder();
    builder.init(ft);
    builder.setAttributes(fcSchema.getAttributeDescriptors());
    SimpleFeatureType renamedFt = builder.buildFeatureType();
    return new ReTypingFeatureCollection(fc, renamedFt);
  }
  /**
   * Returns the subset of this collection matching the given filter, by mixing the filter into
   * this collection's base query.
   *
   * @param filter the filter restricting the returned features
   * @return a new collection over the same feature source, restricted by {@code filter}
   */
  public SimpleFeatureCollection subCollection(Filter filter) {
    // declare the concrete type up front instead of casting on every mutator call
    DefaultQuery filterQuery = new DefaultQuery();
    filterQuery.setFilter(filter);

    Query merged = DataUtilities.mixQueries(this.query, filterQuery, null);
    return new ContentFeatureCollection(featureSource, merged);
  }
  /**
   * Returns a view of this collection sorted by the given criterion, by mixing the sort order
   * into this collection's base query.
   *
   * @param sort the sort criterion to apply
   * @return a new collection over the same feature source, sorted by {@code sort}
   */
  public SimpleFeatureCollection sort(org.opengis.filter.sort.SortBy sort) {
    // declare the concrete type up front instead of casting on every mutator call
    DefaultQuery sortQuery = new DefaultQuery();
    sortQuery.setSortBy(new org.opengis.filter.sort.SortBy[] {sort});

    Query merged = DataUtilities.mixQueries(this.query, sortQuery, null);
    return new ContentFeatureCollection(featureSource, merged);
  }
  /**
   * Runs a sum aggregation over {@code doubleProperty} of {@code ft1} with a limit/offset query
   * and checks the expected total, skipping dialects without limit/offset support.
   */
  public void testSumWithLimitOffset() throws Exception {
    // nothing to test on dialects that cannot express limit/offset
    if (!dataStore.getSQLDialect().isLimitOffsetSupported()) {
      return;
    }

    FilterFactory factory = dataStore.getFilterFactory();
    PropertyName sumProperty = factory.property(aname("doubleProperty"));
    SumVisitor sumVisitor = new MySumVisitor(sumProperty);

    // query the first two features only
    DefaultQuery limited = new DefaultQuery(tname("ft1"));
    limited.setStartIndex(0);
    limited.setMaxFeatures(2);

    dataStore.getFeatureSource(tname("ft1")).accepts(limited, sumVisitor, null);

    // NOTE(review): `visited` is a flag defined elsewhere in the test class — presumably it
    // records whether the visitor fell back to feature-by-feature visiting; confirm
    assertFalse(visited);
    assertEquals(1.1, sumVisitor.getResult().toDouble(), 0.01);
  }
 /**
  * Clones the query and sets the proper type name into it.
  *
  * @param query the query to clone; its own type name, if any, is overwritten
  * @return a copy of {@code query} whose type name is this layer's schema type name
  */
 private Query getTypedQuery(Query query) {
   DefaultQuery q = new DefaultQuery(query);
   q.setTypeName(schema.getTypeName());
   return q;
 }
  /**
   * Returns the change log between two versions: the changeset records for every revision that
   * created, deleted or modified a feature matched by {@code filter} in the given interval.
   *
   * @param fromVersion the start version; swapped with {@code toVersion} if it is more recent
   * @param toVersion the end version
   * @param filter restricts the features whose history is inspected; {@code null} means
   *     {@link Filter#INCLUDE}
   * @param userIds passed through to the modified-fid lookup to restrict changes by author
   * @param maxRows maximum number of changeset rows to return; non-positive means unlimited
   * @return the matching changesets, sorted by revision (descending, or ascending if the
   *     versions were swapped); empty when nothing changed in the interval
   * @throws IOException if the underlying datastore cannot be accessed
   */
  public FeatureCollection<SimpleFeatureType, SimpleFeature> getLog(
      String fromVersion, String toVersion, Filter filter, String[] userIds, int maxRows)
      throws IOException {
    if (filter == null) filter = Filter.INCLUDE;
    RevisionInfo r1 = new RevisionInfo(fromVersion);
    RevisionInfo r2 = new RevisionInfo(toVersion);

    // normalize the interval so r1 <= r2, remembering the swap to flip the sort order later
    boolean swapped = false;
    if (r1.revision > r2.revision) {
      // swap them
      RevisionInfo tmpr = r1;
      r1 = r2;
      r2 = tmpr;
      String tmps = toVersion;
      toVersion = fromVersion;
      fromVersion = tmps;
      swapped = true;
    }

    // We implement this exactly as described. Happily, it seems Postgis does not have
    // sql lentgh limitations. Yet, if would be a lot better if we could encode this
    // as a single sql query with subqueries... (but not all filters are encodable...)
    ModifiedFeatureIds mfids =
        store.getModifiedFeatureFIDs(
            schema.getTypeName(), fromVersion, toVersion, filter, userIds, getTransaction());
    // gather every fid touched in the interval, whatever the kind of change
    Set ids = new HashSet(mfids.getCreated());
    ids.addAll(mfids.getDeleted());
    ids.addAll(mfids.getModified());

    // grab the eventually modified revisions from mfids
    r1 = mfids.fromRevision;
    r2 = mfids.toRevision;

    // no changes?
    if (ids.isEmpty()) return new EmptyFeatureCollection(schema);

    // Create a filter that sounds like:
    // (revision > r1 and revision <= r2) or (expired > r1 and expired <= r2) and fid in
    // (fidlist)
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
    Filter fidFilter = store.buildFidFilter(ids);
    Filter transformedFidFilter = store.transformFidFilter(schema.getTypeName(), fidFilter);
    Filter revGrR1 = ff.greater(ff.property("revision"), ff.literal(r1.revision));
    Filter revLeR2 = ff.lessOrEqual(ff.property("revision"), ff.literal(r2.revision));
    Filter expGrR1 = ff.greater(ff.property("expired"), ff.literal(r1.revision));
    Filter expLeR2 = ff.lessOrEqual(ff.property("expired"), ff.literal(r2.revision));
    Filter versionFilter =
        ff.and(transformedFidFilter, ff.or(ff.and(revGrR1, revLeR2), ff.and(expGrR1, expLeR2)));

    // We just want the revision and expired, build a query against the real feature type
    DefaultQuery q =
        new DefaultQuery(schema.getTypeName(), versionFilter, new String[] {"revision", "expired"});
    FeatureReader<SimpleFeatureType, SimpleFeature> fr = null;
    SortedSet revisions = new TreeSet();
    try {
      fr = store.wrapped.getFeatureReader(q, getTransaction());
      while (fr.hasNext()) {
        SimpleFeature f = fr.next();
        // attribute 0/1 follow the property list of the query above: revision, expired
        Long revision = (Long) f.getAttribute(0);
        if (revision.longValue() > r1.revision) revisions.add(revision);
        Long expired = (Long) f.getAttribute(1);
        // an expired value of Long.MAX_VALUE marks a still-live record, so skip it
        if (expired.longValue() != Long.MAX_VALUE && expired.longValue() > r1.revision)
          revisions.add(expired);
      }
    } catch (Exception e) {
      throw new DataSourceException("Error reading modified revisions from datastore", e);
    } finally {
      if (fr != null) fr.close();
    }

    // now, we have a list of revisions between a min and a max
    // let's try to build a fid filter with revisions from the biggest to the smallest
    Set revisionIdSet = new HashSet();
    for (Iterator it = revisions.iterator(); it.hasNext(); ) {
      Long rev = (Long) it.next();
      revisionIdSet.add(
          ff.featureId(VersionedPostgisDataStore.TBL_CHANGESETS + "." + rev.toString()));
    }
    if (revisionIdSet.isEmpty()) return new EmptyFeatureCollection(schema);
    Filter revisionFilter = ff.id(revisionIdSet);

    // return the changelog
    // TODO: sort on revision descending. Unfortunately, to do so we have to fix fid mappers,
    // so that auto-increment can return revision among the attributes, and at the same
    // time simply allow not include fid attributes in the insert queries (or provide a
    // "default"
    // value for them).
    FeatureSource<SimpleFeatureType, SimpleFeature> changesets =
        (FeatureSource<SimpleFeatureType, SimpleFeature>)
            store.getFeatureSource(VersionedPostgisDataStore.TBL_CHANGESETS);
    DefaultQuery sq = new DefaultQuery();
    sq.setFilter(revisionFilter);
    // the log reads newest-first unless the caller passed the versions in reverse order
    final SortOrder order = swapped ? SortOrder.ASCENDING : SortOrder.DESCENDING;
    sq.setSortBy(new SortBy[] {ff.sort("revision", order)});
    if (maxRows > 0) sq.setMaxFeatures(maxRows);
    return changesets.getFeatures(sq);
  }
  /**
   * Rolls the features matched by {@code filter} (and optionally authored by {@code userIds})
   * back to their state at {@code toVersion}, in three steps: features created after that
   * version are removed, features deleted after it are re-created with their original fids, and
   * features modified after it have their attributes restored.
   *
   * @param toVersion the version to roll back to
   * @param filter restricts the features to roll back
   * @param userIds passed through to the modified-fid lookup to restrict changes by author
   * @throws IOException if the underlying datastore cannot be accessed
   */
  public void rollback(String toVersion, Filter filter, String[] userIds) throws IOException {
    // TODO: build an optimized version of this that can do the same work with a couple
    // of queries assuming the filter is fully encodable

    // run all three steps in one transaction; if the current one is AUTO_COMMIT use a private
    // transaction and commit it at the end
    Transaction t = getTransaction();
    boolean autoCommit = false;
    if (Transaction.AUTO_COMMIT.equals(t)) {
      t = new DefaultTransaction();
      autoCommit = true;
    }
    // NOTE(review): in auto-commit mode the private transaction is neither rolled back nor
    // closed if an exception escapes below — consider a try/finally; confirm intended behavior

    // Gather feature modified after toVersion
    ModifiedFeatureIds mfids =
        store.getModifiedFeatureFIDs(schema.getTypeName(), toVersion, null, filter, userIds, t);
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);

    // grab the state, we need to mark as dirty all the features we are going to modify/re-insert
    VersionedJdbcTransactionState state = store.wrapped.getVersionedJdbcTransactionState(t);

    // remove all features that have been created and not deleted
    Set fidsToRemove = new HashSet(mfids.getCreated());
    fidsToRemove.removeAll(mfids.getDeleted());
    if (!fidsToRemove.isEmpty()) {
      removeFeatures(store.buildFidFilter(fidsToRemove));
      state.setTypeNameDirty(getSchema().getTypeName());
    }

    // reinstate all features that were there before toVersion and that
    // have been deleted after it. Notice this is an insertion, so to preserve
    // the fids I have to use low level writers where I can set all attributes manually
    // (we work on the assumption the wrapped data store maps all attributes of the primary
    // key in the feature itself)
    Set fidsToRecreate = new HashSet(mfids.getDeleted());
    fidsToRecreate.removeAll(mfids.getCreated());
    if (!fidsToRecreate.isEmpty()) {
      state.setTypeNameDirty(getSchema().getTypeName());
      state.setFidsDirty(getSchema().getTypeName(), fidsToRecreate);

      long revision = store.wrapped.getVersionedJdbcTransactionState(t).getRevision();
      Filter recreateFilter =
          store.buildVersionedFilter(
              schema.getTypeName(), store.buildFidFilter(fidsToRecreate), mfids.fromRevision);
      FeatureReader<SimpleFeatureType, SimpleFeature> fr = null;
      FeatureWriter<SimpleFeatureType, SimpleFeature> fw = null;
      try {
        DefaultQuery q = new DefaultQuery(schema.getTypeName(), recreateFilter);
        fr = store.wrapped.getFeatureReader(q, t);
        fw = store.wrapped.getFeatureWriterAppend(schema.getTypeName(), t);
        while (fr.hasNext()) {
          SimpleFeature original = fr.next();
          SimpleFeature restored = fw.next();
          for (int i = 0; i < original.getFeatureType().getAttributeCount(); i++) {
            restored.setAttribute(i, original.getAttribute(i));
          }
          // stamp the re-created record as live at the current revision
          restored.setAttribute("revision", Long.valueOf(revision));
          restored.setAttribute("expired", Long.valueOf(Long.MAX_VALUE));
          fw.write();
        }
      } catch (IllegalAttributeException iae) {
        // fix: message previously read "featues"
        throw new DataSourceException(
            "Unexpected error occurred while " + "restoring deleted features", iae);
      } finally {
        if (fr != null) fr.close();
        if (fw != null) fw.close();
      }
    }

    // Now onto the modified features, that were there, and still are there.
    // Since we cannot get a sorted writer we have to do a kind of inner loop scan
    // (note, a parallel scan of similarly sorted reader and writer would be more
    // efficient, but writer sorting is not there...)
    // Here it's possible to work against the external API, thought it would be more
    // efficient (but more complex) to work against the wrapped one.
    if (!mfids.getModified().isEmpty()) {
      state.setTypeNameDirty(getSchema().getTypeName());
      state.setFidsDirty(getSchema().getTypeName(), mfids.getModified());

      Filter modifiedIdFilter = store.buildFidFilter(mfids.getModified());
      Filter mifCurrent =
          store.buildVersionedFilter(schema.getTypeName(), modifiedIdFilter, new RevisionInfo());
      FeatureReader<SimpleFeatureType, SimpleFeature> fr = null;
      FeatureWriter<SimpleFeatureType, SimpleFeature> fw = null;
      try {
        fw = store.getFeatureWriter(schema.getTypeName(), mifCurrent, t);
        while (fw.hasNext()) {
          SimpleFeature current = fw.next();
          // for each live feature, read back its state at the rollback revision
          Filter currIdFilter = ff.id(Collections.singleton(ff.featureId(current.getID())));
          Filter cidToVersion =
              store.buildVersionedFilter(schema.getTypeName(), currIdFilter, mfids.fromRevision);
          DefaultQuery q = new DefaultQuery(schema.getTypeName(), cidToVersion);
          q.setVersion(mfids.fromRevision.toString());
          fr = store.getFeatureReader(q, t);
          SimpleFeature original = fr.next();
          for (int i = 0; i < original.getFeatureType().getAttributeCount(); i++) {
            current.setAttribute(i, original.getAttribute(i));
          }
          fr.close();
          fw.write();
        }
      } catch (IllegalAttributeException iae) {
        // fix: message was copy-pasted from the deleted-features branch above
        throw new DataSourceException(
            "Unexpected error occurred while " + "rolling back modified features", iae);
      } finally {
        if (fr != null) fr.close();
        if (fw != null) fw.close();
      }
    }

    // if it's auto commit, don't forget to actually commit
    if (autoCommit) {
      t.commit();
      t.close();
    }
  }