Example 1
  /**
   * Tests the case where the filter makes use of 2 different attributes but the Query object only
   * requests 1 of the two attributes. This guards against a regression of a previously fixed bug.
   */
  @Test
  public void testFeatureReaderWithQuery() throws Exception {
    if (url == null) return;
    Map m = new HashMap();
    m.put(WFSDataStoreFactory.URL.key, url);
    m.put(WFSDataStoreFactory.TIMEOUT.key, Integer.valueOf(100000));
    WFS_1_0_0_DataStore wfs = (WFS_1_0_0_DataStore) (new WFSDataStoreFactory()).createDataStore(m);
    FilterFactory2 fac = CommonFactoryFinder.getFilterFactory2(GeoTools.getDefaultHints());

    Filter filter = fac.equals(fac.property("NAME"), fac.literal("E 58th St"));

    Query query = new Query("tiger:tiger_roads", filter);
    FeatureReader<SimpleFeatureType, SimpleFeature> reader =
        wfs.getFeatureReader(query, new DefaultTransaction());
    int expected = 0;
    while (reader.hasNext()) {
      expected++;
      reader.next();
    }
    reader.close();

    query = new Query("tiger:tiger_roads", filter, 100, new String[] {"CFCC"}, "");
    reader = wfs.getFeatureReader(query, new DefaultTransaction());
    int count = 0;
    while (reader.hasNext()) {
      count++;
      reader.next();
    }
    reader.close();

    assertEquals(expected, count);
  }
  /** @see org.geotools.data.FeatureStore#setFeatures(org.geotools.data.FeatureReader) */
  public void setFeatures(FeatureReader<SimpleFeatureType, SimpleFeature> reader)
      throws IOException {
    WFSTransactionState ts = null;

    if (trans == Transaction.AUTO_COMMIT) {
      ts = new WFSTransactionState(ds);
    } else {
      ts = (WFSTransactionState) trans.getState(ds);
    }

    ts.addAction(
        getSchema().getTypeName(), new DeleteAction(getSchema().getTypeName(), Filter.INCLUDE));

    ReferencedEnvelope bounds = null;
    while (reader.hasNext()) {

      try {
        SimpleFeature f = reader.next();
        List<AttributeDescriptor> atrs = f.getFeatureType().getAttributeDescriptors();
        for (int i = 0; i < atrs.size(); i++) {
          if (atrs.get(i) instanceof GeometryDescriptor) {
            Geometry g = (Geometry) f.getAttribute(i);
            if (g == null) continue; // guard before dereferencing the geometry
            CoordinateReferenceSystem cs =
                ((GeometryDescriptor) atrs.get(i)).getCoordinateReferenceSystem();
            if (cs != null && !cs.getIdentifiers().isEmpty())
              g.setUserData(cs.getIdentifiers().iterator().next().toString());
            if (bounds == null) {
              bounds = new ReferencedEnvelope(g.getEnvelopeInternal(), cs);
            } else {
              bounds.expandToInclude(g.getEnvelopeInternal());
            }
          }
        }
        ts.addAction(getSchema().getTypeName(), new InsertAction(f));
      } catch (NoSuchElementException e) {
        WFS_1_0_0_DataStore.LOGGER.warning(e.toString());
      } catch (IllegalAttributeException e) {
        WFS_1_0_0_DataStore.LOGGER.warning(e.toString());
      }
    }

    // Fire a notification.
    // JE
    if (bounds == null) {
      // if bounds are null then send an envelope to say that features were added but
      // at an unknown location.
      bounds = new ReferencedEnvelope(getSchema().getCoordinateReferenceSystem());
    }
    ((WFS_1_0_0_DataStore) getDataStore())
        .listenerManager.fireFeaturesRemoved(
            getSchema().getTypeName(), getTransaction(), bounds, false);
    if (trans == Transaction.AUTO_COMMIT) {
      ts.commit();
    }
  }
Example 3
  /**
   * Sets the features of the source.
   *
   * <p>This method operates by first clearing the contents of the feature store ({@link
   * #removeFeatures(Filter)}), and then obtaining an appending feature writer and writing all
   * features from <tt>reader</tt> to it.
   */
  public final void setFeatures(FeatureReader<SimpleFeatureType, SimpleFeature> reader)
      throws IOException {
    // remove features
    removeFeatures(Filter.INCLUDE);

    // grab a feature writer for insert
    FeatureWriter<SimpleFeatureType, SimpleFeature> writer = getWriter(Filter.INCLUDE, WRITER_ADD);
    try {
      while (reader.hasNext()) {
        SimpleFeature feature = reader.next();

        // grab next feature and populate it
        // JD: worth a note on how we do this... we take a "pull" approach
        // because the raw schema we are inserting into may not match the
        // schema of the features we are inserting
        SimpleFeature toWrite = writer.next();
        for (int i = 0; i < toWrite.getType().getAttributeCount(); i++) {
          String name = toWrite.getType().getDescriptor(i).getLocalName();
          toWrite.setAttribute(name, feature.getAttribute(name));
        }

        // perform the write
        writer.write();
      }
    } finally {
      writer.close();
    }
  }
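The "pull" copy above generalizes to any pair of features whose schemas only partially overlap. A minimal sketch of that step in isolation (the helper name is illustrative, not library API):

  // Copy attributes from 'source' into 'target' by local name; getAttribute(String)
  // returns null when 'source' has no such attribute, mirroring the pull loop above.
  static void pullAttributes(SimpleFeature source, SimpleFeature target) {
    for (int i = 0; i < target.getType().getAttributeCount(); i++) {
      String name = target.getType().getDescriptor(i).getLocalName();
      target.setAttribute(name, source.getAttribute(name));
    }
  }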
Example 4
  public static void main(String[] args) {
    System.out.println("WFS Demo");
    try {
      //            URL url = new
      // URL("http://www2.dmsolutions.ca/cgi-bin/mswfs_gmap?version=1.0.0&request=getcapabilities&service=wfs");
      URL url = new URL("http://www.refractions.net:8080/geoserver/wfs?REQUEST=GetCapabilities");

      Map m = new HashMap();
      m.put(WFSDataStoreFactory.URL.key, url);
      m.put(WFSDataStoreFactory.TIMEOUT.key, Integer.valueOf(10000));
      m.put(WFSDataStoreFactory.PROTOCOL.key, Boolean.FALSE);

      // createDataStore, not createNewDataStore: a WFS client cannot create new remote stores
      DataStore wfs = (new WFSDataStoreFactory()).createDataStore(m);
      Query query = new DefaultQuery(wfs.getTypeNames()[1]);
      FeatureReader<SimpleFeatureType, SimpleFeature> ft =
          wfs.getFeatureReader(query, Transaction.AUTO_COMMIT);
      int count = 0;
      while (ft.hasNext()) {
        if (ft.next() != null) count++;
      }
      ft.close();
      System.out.println("Found " + count + " features");
    } catch (IOException e) {
      e.printStackTrace();
    } catch (NoSuchElementException e) {
      e.printStackTrace();
    } catch (IllegalAttributeException e) {
      e.printStackTrace();
    }
  }
  /**
   * @param dataStore A ShapeFileDataStore containing geometries to convert.
   * @param keyAttributes The names of attributes to be concatenated to generate record keys.
   * @throws Exception
   */
  public static void convertFeatures(
      GeometryStreamConverter converter, ShapefileDataStore dataStore, List<String> keyAttributes)
      throws Exception {
    SimpleFeatureType schema = dataStore.getSchema();
    int numFeatures = dataStore.getCount(Query.ALL);
    FeatureReader<SimpleFeatureType, SimpleFeature> reader = null;
    try {
      List<AttributeDescriptor> attrDesc = schema.getAttributeDescriptors();
      String header = "\"the_geom_id\", \"the_geom_key\"";
      for (int i = 1; i < attrDesc.size(); i++) {
        String colName = attrDesc.get(i).getLocalName();
        if (GeometryStreamConverter.debugDBF) header += ", \"" + colName + '"';
        // if any specified attribute matches colName, case insensitive, overwrite specified
        // attribute name with name having correct case
        for (int j = 0; j < keyAttributes.size(); j++)
          if (keyAttributes.get(j).equalsIgnoreCase(colName)) keyAttributes.set(j, colName);
      }
      // debug: read schema and print it out
      if (GeometryStreamConverter.debugDBF) System.out.println(header);

      // loop through features and parse them
      long startTime = System.currentTimeMillis(),
          endTime = startTime,
          debugInterval = 60000,
          nextDebugTime = startTime + debugInterval;
      int featureCount = 0;

      reader = dataStore.getFeatureReader();
      CoordinateReferenceSystem projection = schema.getCoordinateReferenceSystem(); // may be null
      String projectionWKT = projection == null ? null : projection.toWKT();

      while (reader.hasNext()) {
        endTime = System.currentTimeMillis();
        if (GeometryStreamConverter.debugTime && endTime > nextDebugTime) {
          System.out.println(
              String.format(
                  "Processing %s/%s features, %s minutes elapsed",
                  featureCount, numFeatures, (endTime - startTime) / 60000.0));
          while (endTime > nextDebugTime) nextDebugTime += debugInterval;
        }
        convertFeature(converter, reader.next(), keyAttributes, projectionWKT);
        featureCount++;
      }

      if (GeometryStreamConverter.debugTime && endTime - startTime > debugInterval)
        System.out.println(
            String.format(
                "Processing %s features completed in %s minutes",
                numFeatures, (endTime - startTime) / 60000.0));
    } catch (OutOfMemoryError e) {
      e.printStackTrace();
      throw e;
    } finally {
      try {
        if (reader != null) reader.close();
      } catch (IOException e) {
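        // ignore: failure to close the reader is not fatal here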
      }
    }
  }
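A hypothetical driver for the converter above, sketched under the assumption that a configured GeometryStreamConverter is at hand; the shapefile name and the key attributes are placeholders:

  // Stream a local shapefile through the converter, keying records on the
  // concatenation of the (assumed) STATE and COUNTY attributes.
  static void convertCounties(GeometryStreamConverter converter) throws Exception {
    ShapefileDataStore dataStore =
        new ShapefileDataStore(new File("counties.shp").toURI().toURL());
    try {
      convertFeatures(converter, dataStore, Arrays.asList("STATE", "COUNTY"));
    } finally {
      dataStore.dispose();
    }
  }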
Example 6
  private void testVendorParameters(Boolean usePost) throws IOException {
    if (url == null) return;

    Map m = new HashMap();
    m.put(WFSDataStoreFactory.URL.key, url);
    m.put(WFSDataStoreFactory.TIMEOUT.key, Integer.valueOf(100000));
    m.put(WFSDataStoreFactory.PROTOCOL.key, usePost);
    WFS_1_0_0_DataStore wfs = (WFS_1_0_0_DataStore) (new WFSDataStoreFactory()).createDataStore(m);

    final WFS100ProtocolHandler originalHandler = wfs.protocolHandler;
    wfs.protocolHandler =
        new WFS100ProtocolHandler(null, originalHandler.getConnectionFactory()) {
          @Override
          protected WFSCapabilities parseCapabilities(InputStream capabilitiesReader)
              throws IOException {
            return originalHandler.getCapabilities();
          }

          @Override
          public ConnectionFactory getConnectionFactory() {
            return new ConnectionFactoryWrapper(super.getConnectionFactory()) {

              @Override
              public HttpURLConnection getConnection(URL query, HttpMethod method)
                  throws IOException {
                String[] keyValueArray = query.getQuery().split("&");
                Map<String, String> kvp = new HashMap<String, String>();
                for (String keyValue : keyValueArray) {
                  String[] skv = keyValue.split("=");
                  kvp.put(skv[0], skv[1]);
                }

                // check the vendor params actually made it into the url
                assertEquals("true", kvp.get("strict"));
                assertEquals("mysecret", kvp.get("authkey"));
                assertEquals("low%3A2000000%3Bhigh%3A5000000", kvp.get("viewparams"));

                return super.getConnection(query, method);
              }
            };
          }
        };

    Map<String, String> vparams = new HashMap<String, String>();
    vparams.put("authkey", "mysecret");
    vparams.put("viewparams", "low:2000000;high:5000000");
    vparams.put("strict", "true");
    Hints hints = new Hints(WFSDataStore.WFS_VENDOR_PARAMETERS, vparams);
    Query q = new Query("topp:states");
    q.setHints(hints);

    // issue the query; the wrapped connection factory above asserts the vendor params
    FeatureReader fr = wfs.getFeatureReader(q, Transaction.AUTO_COMMIT);
    assertTrue(fr.hasNext());
    fr.close();
  }
  public ReferencedEnvelope getBounds() {
    FeatureReader<SimpleFeatureType, SimpleFeature> reader = null;
    try {
      ReferencedEnvelope result = featureSource.getBounds(query);
      if (result != null) {
        return result;
      }

      // oops, we have to compute the result by hand. Let's load just the
      // geometry attributes though
      Query q = new Query(query);
      List<String> geometries = new ArrayList<String>();
      for (AttributeDescriptor ad : getSchema().getAttributeDescriptors()) {
        if (ad instanceof GeometryDescriptor) {
          geometries.add(ad.getLocalName());
        }
      }
      // no geometries, no bounds
      if (geometries.size() == 0) {
        return new ReferencedEnvelope();
      } else {
        q.setPropertyNames(geometries);
      }
      // grab the features and scan through them
      reader = featureSource.getReader(q);
      while (reader.hasNext()) {
        SimpleFeature f = reader.next();
        ReferencedEnvelope featureBounds = ReferencedEnvelope.reference(f.getBounds());
        if (result == null) {
          result = featureBounds;
        } else if (featureBounds != null) {
          result.expandToInclude(featureBounds);
        }
      }
      // return the results if we got any, or return an empty one otherwise
      if (result != null) {
        return result;
      } else {
        return new ReferencedEnvelope(getSchema().getCoordinateReferenceSystem());
      }
    } catch (IOException e) {
      throw new RuntimeException(e);
    } finally {
      if (reader != null) {
        try {
          reader.close();
        } catch (IOException ex) {
          // we tried...
        }
      }
    }
  }
 public List<FeatureId> addFeatures(final FeatureReader<SimpleFeatureType, SimpleFeature> reader)
     throws IOException {
   List<SimpleFeature> features = new ArrayList<SimpleFeature>();
   while (reader.hasNext()) {
     try {
       SimpleFeature next = reader.next();
       features.add(next);
     } catch (Exception e) {
       throw (IOException) new IOException().initCause(e);
     }
   }
   return addFeatures(
       DataUtilities.collection(features.toArray(new SimpleFeature[0])));
 }
  @Test
  public void testGetFeaturesReader() throws Exception {
    FeatureReader<SimpleFeatureType, SimpleFeature> fr;
    fr = rts.getFeatureReader(new Query(RENAMED), Transaction.AUTO_COMMIT);
    SimpleFeature sf = fr.next();
    fr.close();

    assertEquals(primitive, sf.getFeatureType());

    // check the feature ids have been renamed as well
    assertTrue(
        "Feature id has not been renamed, it's still " + sf.getID(),
        sf.getID().startsWith(RENAMED));
  }
 public void close() {
   try {
     reader.close();
   } catch (IOException e) {
     e.printStackTrace();
   }
 }
 /* (non-Javadoc)
  * @see java.util.Iterator#hasNext()
  */
 public boolean hasNext() {
   try {
     return reader.hasNext();
   } catch (Exception e) {
     return false;
   }
 }
 public Object next() {
   try {
     return delegate.next();
   } catch (IOException e) {
     throw new RuntimeException(e);
   }
 }
 public void close() {
   try {
     delegate.close();
   } catch (IOException e) {
     throw new RuntimeException(e);
   }
 }
 public boolean hasNext() {
   try {
     return delegate.hasNext();
   } catch (IOException e) {
     throw new RuntimeException(e);
   }
 }
 /* (non-Javadoc)
  * @see java.util.Iterator#next()
  */
 public F next() {
   try {
     return reader.next();
   } catch (Exception e) {
     throw new NoSuchElementException("Exception raised during next: " + e.getLocalizedMessage());
   }
 }
 public SimpleFeature next() throws java.util.NoSuchElementException {
   try {
     return delegate.next();
   } catch (IOException e) {
     throw new RuntimeException(e);
   }
 }
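Assembled, the delegate fragments above form a familiar adapter: an Iterator facade over a FeatureReader. A sketch composing them into one class (the class name and field wiring are assumptions, not the original source):

 class FeatureReaderIterator implements Iterator<SimpleFeature> {
   private final FeatureReader<SimpleFeatureType, SimpleFeature> reader;

   FeatureReaderIterator(FeatureReader<SimpleFeatureType, SimpleFeature> reader) {
     this.reader = reader;
   }

   public boolean hasNext() {
     try {
       return reader.hasNext();
     } catch (Exception e) {
       // mirror the fragment above: treat failure as end of stream
       return false;
     }
   }

   public SimpleFeature next() {
     try {
       return reader.next();
     } catch (Exception e) {
       throw new NoSuchElementException("Exception raised during next: " + e.getLocalizedMessage());
     }
   }

   public void remove() {
     throw new UnsupportedOperationException();
   }

   public void close() {
     try {
       reader.close();
     } catch (IOException e) {
       // best-effort close, as in the close() fragment above
     }
   }
 }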
 protected Set fids(Filter filter)
     throws NoSuchElementException, IOException, IllegalAttributeException {
   Set fids = new HashSet();
   String typeName = getSchema().getTypeName();
   DefaultQuery query =
       new DefaultQuery(typeName, filter, Integer.MAX_VALUE, Query.ALL_NAMES, "fids");
   FeatureReader<SimpleFeatureType, SimpleFeature> reader =
       getJDBCDataStore().getFeatureReader(query, getTransaction());
   try {
     while (reader.hasNext()) {
       fids.add(reader.next().getID());
     }
   } finally {
     reader.close();
   }
   return fids;
 }
 public int size() {
   FeatureReader fr = null;
   try {
     int size = featureSource.getCount(query);
     if (size >= 0) {
       return size;
     } else {
       // we have to iterate, probably best if we do a minimal query that
       // only loads a short attribute
       AttributeDescriptor chosen = null;
       for (AttributeDescriptor ad : getSchema().getAttributeDescriptors()) {
         if (chosen == null || size(ad) < size(chosen)) {
           chosen = ad;
         }
       }
       // build the minimal query
       Query q = new Query(query);
       if (chosen != null) {
         q.setPropertyNames(Collections.singletonList(chosen.getLocalName()));
       }
       // bean counting...
       fr = featureSource.getReader(q);
       int count = 0;
       while (fr.hasNext()) {
         fr.next();
         count++;
       }
       return count;
     }
   } catch (IOException e) {
     throw new RuntimeException(e);
   } finally {
     if (fr != null) {
       try {
         fr.close();
       } catch (IOException e) {
         throw new RuntimeException(e);
       }
     }
   }
 }
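The size(AttributeDescriptor) helper called above is not part of this excerpt; a plausible sketch of what it could estimate (the byte weights are assumptions, not the original code):

  // Rough per-attribute cost so the counting query can load the cheapest column.
  static int size(AttributeDescriptor ad) {
    Class<?> binding = ad.getType().getBinding();
    if (Boolean.class.isAssignableFrom(binding)) return 1;
    if (Number.class.isAssignableFrom(binding)) return 8;
    if (String.class.isAssignableFrom(binding)) return 32;
    // geometries and unknown types are assumed expensive
    return 1024;
  }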
  /**
   * Replace with contents of reader.
   *
   * <p>Equivalent to:
   *
   * <pre><code>
   * FeatureWriter<SimpleFeatureType, SimpleFeature> writer = dataStore.getFeatureWriter( typeName, false, transaction );
   * Feature feature, newFeature;
   * while( writer.hasNext() ){
   *    feature = writer.next();
   *    writer.remove();
   * }
   * while( reader.hasNext() ){
   *    newFeature = reader.next();
   *    feature = writer.next();
   *    feature.setAttributes( newFeature.getAttributes() );
   *    writer.write();
   * }
   * reader.close();
   * writer.close();
   * </code></pre>
   *
   * <p>Subclasses may override this method to perform the appropriate optimization for this result.
   *
   * @param reader Contents to replace with
   * @throws IOException
   */
  public void setFeatures(FeatureReader<SimpleFeatureType, SimpleFeature> reader)
      throws IOException {
    String typeName = getSchema().getTypeName();
    FeatureWriter<SimpleFeatureType, SimpleFeature> writer =
        getDataStore().getFeatureWriter(typeName, getTransaction());
    SimpleFeature feature;
    SimpleFeature newFeature;

    try {
      while (writer.hasNext()) {
        feature = writer.next();
        LOGGER.finer("removing feature " + feature);
        writer.remove();
      }

      while (reader.hasNext()) {
        try {
          feature = reader.next();
        } catch (Exception readProblem) {
          throw new DataSourceException(
              "Could not add Features, problem with provided reader", readProblem);
        }

        newFeature = (SimpleFeature) writer.next();

        try {
          newFeature.setAttributes(feature.getAttributes());
        } catch (IllegalAttributeException writeProblem) {
          throw new DataSourceException(
              "Could not create " + typeName + " out of provided feature: " + feature.getID(),
              writeProblem);
        }
        LOGGER.finer("writing feature " + newFeature);
        writer.write();
      }
    } finally {
      reader.close();
      writer.close();
    }
  }
  /**
   * Test that a filter query is issued and returns the expected features.
  *
  * @param filter filter to be passed in the query to determine the subset of features to be
  *     returned, or null for all features
  * @param expectedFeatureIds integer id for returned features, matching the expected row ids
  * @throws Exception
  */
 private void runFilterTest(Filter filter, int[] expectedFeatureIds) throws Exception {
   DataStore datastore = null;
   try {
     datastore = DataStoreFinder.getDataStore(getParams());
     assertNotNull(datastore);
     Query query = new DefaultQuery(TEST_TABLE_NAME, filter);
     FeatureReader<SimpleFeatureType, SimpleFeature> reader = null;
     try {
       /*
        * List of all the integer feature ids seen in the features returned for this query.
        */
       List<Integer> featureIds = new ArrayList<Integer>();
       reader = datastore.getFeatureReader(query, Transaction.AUTO_COMMIT);
       for (SimpleFeature feature = null; reader.hasNext(); ) {
         feature = reader.next();
         /*
          * Convert long feature id of the form test_table_name.1234 to the numeric id
          * used when creating the row. This relies on the behaviour of the postgis fid
          * mapper.
          */
         Integer id = Integer.valueOf(feature.getID().replace(TEST_TABLE_NAME + ".", ""));
         featureIds.add(id);
       }
       /*
        * The query succeeded as expected if and only if the sorted lists of returned and
        * expected ids are equal. The expected ids are converted to a List of Integers to
        * leverage Collections comparison.
        */
       assertEquals(sortedList(expectedFeatureIds), sorted(featureIds));
     } finally {
       if (reader != null) {
         reader.close();
       }
     }
   } finally {
     if (datastore != null) {
       datastore.dispose();
     }
   }
 }
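A hypothetical call site for the helper above, in the FilterFactory2 style used elsewhere in this section; the attribute name, literal, and expected row ids are placeholders:

   FilterFactory2 ff = CommonFactoryFinder.getFilterFactory2(GeoTools.getDefaultHints());
   // expect rows 1 and 3 to carry name = "first" (placeholder values)
   runFilterTest(ff.equals(ff.property("name"), ff.literal("first")), new int[] {1, 3});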
Example 21
  /**
   * reads the completechain dataset
   *
   * @throws Exception
   */
  public void getrows() throws Exception {

    // SELECT ... FROM ... WHERE
    //   (statel=state and countyl=county) OR (stater=state and countyr=county )
    FilterFactory ff = FilterFactory.createFilterFactory();

    CompareFilter cf1 = ff.createCompareFilter(FilterType.COMPARE_EQUALS);
    cf1.addLeftValue(ff.createAttributeExpression(null, "statel"));
    cf1.addRightValue(ff.createLiteralExpression(state));

    CompareFilter cf2 = ff.createCompareFilter(FilterType.COMPARE_EQUALS);
    cf2.addLeftValue(ff.createAttributeExpression(null, "countyl"));
    cf2.addRightValue(ff.createLiteralExpression(county));

    LogicFilter and1 = ff.createLogicFilter(cf1, cf2, Filter.LOGIC_AND);

    CompareFilter cf3 = ff.createCompareFilter(FilterType.COMPARE_EQUALS);
    cf3.addLeftValue(ff.createAttributeExpression(null, "stater"));
    cf3.addRightValue(ff.createLiteralExpression(state));

    CompareFilter cf4 = ff.createCompareFilter(FilterType.COMPARE_EQUALS);
    cf4.addLeftValue(ff.createAttributeExpression(null, "countyr"));
    cf4.addRightValue(ff.createLiteralExpression(county));

    LogicFilter and2 = ff.createLogicFilter(cf3, cf4, Filter.LOGIC_AND);

    LogicFilter or = ff.createLogicFilter(and1, and2, Filter.LOGIC_OR);

    String[] ps = new String[] {"wkb_geometry", "statel", "countyl", "stater", "countyr"};
    Query q = new DefaultQuery("county_boundary", or, ps);

    FeatureReader fr = ds.getFeatureReader(q, new DefaultTransaction());

    while (fr.hasNext()) {
      Feature f = fr.next();
      if (!alreadyThere(f.getDefaultGeometry())) lines.add(f.getDefaultGeometry());
    }
    fr.close();
  }
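The OR-of-ANDs predicate above uses the legacy filter API; the same predicate in the newer FilterFactory2 style seen in the other examples of this section, as a sketch assuming state and county are in scope:

    FilterFactory2 ff2 = CommonFactoryFinder.getFilterFactory2(GeoTools.getDefaultHints());
    Filter left =
        ff2.and(
            ff2.equals(ff2.property("statel"), ff2.literal(state)),
            ff2.equals(ff2.property("countyl"), ff2.literal(county)));
    Filter right =
        ff2.and(
            ff2.equals(ff2.property("stater"), ff2.literal(state)),
            ff2.equals(ff2.property("countyr"), ff2.literal(county)));
    Filter or = ff2.or(left, right);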
  /**
   * Add Features from reader to this FeatureStore.
   *
   * <p>Equivalent to:
   *
   * <pre><code>
   * Set set = new HashSet();
   * FeatureWriter<SimpleFeatureType, SimpleFeature> writer = dataStore.getFeatureWriter( typeName, true, transaction );
   * Feature feature, newFeature;
   * while( reader.hasNext() ){
   *    feature = reader.next();
   *    newFeature = writer.next();
   *    newFeature.setAttributes( feature.getAttributes() );
   *    writer.write();
   *    set.add( newFeature.getID() );
   * }
   * reader.close();
   * writer.close();
   *
   * return set;
   * </code></pre>
   *
   * <p>(If you don't have a FeatureReader<SimpleFeatureType, SimpleFeature> handy,
   * DataUtilities.reader() may be able to help out)
   *
   * <p>Subclasses may override this method to perform the appropriate optimization for this result.
   *
   * @param reader
   * @return The Set of FeatureIDs added
   * @throws IOException
   * @see org.geotools.data.FeatureStore#addFeatures(org.geotools.data.FeatureReader)
   */
  public Set addFeatures(FeatureReader<SimpleFeatureType, SimpleFeature> reader)
      throws IOException {
    Set addedFids = new HashSet();
    String typeName = getSchema().getTypeName();
    SimpleFeature feature = null;
    SimpleFeature newFeature;
    FeatureWriter<SimpleFeatureType, SimpleFeature> writer =
        getDataStore().getFeatureWriterAppend(typeName, getTransaction());

    try {
      while (reader.hasNext()) {
        try {
          feature = reader.next();
        } catch (Exception e) {
          throw new DataSourceException("Could not add Features, problem with provided reader", e);
        }

        newFeature = (SimpleFeature) writer.next();

        try {
          newFeature.setAttributes(feature.getAttributes());
        } catch (IllegalAttributeException writeProblem) {
          throw new DataSourceException(
              "Could not create " + typeName + " out of provided feature: " + feature.getID(),
              writeProblem);
        }

        writer.write();
        addedFids.add(newFeature.getID());
      }
    } finally {
      reader.close();
      writer.close();
    }
    return addedFids;
  }
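Following the javadoc's hint about DataUtilities.reader(): a sketch that adapts an in-memory array to a FeatureReader and delegates to the method above (the store type and helper name are assumptions):

  // Adapt an array of features to a FeatureReader and add them all; the
  // addFeatures above closes the reader in its finally block.
  static Set addAll(AbstractFeatureStore store, SimpleFeature[] features) throws IOException {
    FeatureReader<SimpleFeatureType, SimpleFeature> reader = DataUtilities.reader(features);
    return store.addFeatures(reader);
  }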
 @Test
 public void testFeatureReaderFidFilter() throws Exception {
   FeatureReader<SimpleFeatureType, SimpleFeature> fr;
   fr = rts.getFeatureReader(new Query(RENAMED, fidFilter), Transaction.AUTO_COMMIT);
   assertEquals(primitive, fr.getFeatureType());
   assertTrue(fr.hasNext());
   SimpleFeature sf = fr.next();
   assertFalse(fr.hasNext());
   fr.close();
   assertEquals(fid, sf.getID());
 }
  public FeatureCollection<SimpleFeatureType, SimpleFeature> getLog(
      String fromVersion, String toVersion, Filter filter, String[] userIds, int maxRows)
      throws IOException {
    if (filter == null) filter = Filter.INCLUDE;
    RevisionInfo r1 = new RevisionInfo(fromVersion);
    RevisionInfo r2 = new RevisionInfo(toVersion);

    boolean swapped = false;
    if (r1.revision > r2.revision) {
      // swap them
      RevisionInfo tmpr = r1;
      r1 = r2;
      r2 = tmpr;
      String tmps = toVersion;
      toVersion = fromVersion;
      fromVersion = tmps;
      swapped = true;
    }

    // We implement this exactly as described. Happily, it seems Postgis does not have
    // SQL length limitations. Still, it would be a lot better if we could encode this
    // as a single SQL query with subqueries... (but not all filters are encodable...)
    ModifiedFeatureIds mfids =
        store.getModifiedFeatureFIDs(
            schema.getTypeName(), fromVersion, toVersion, filter, userIds, getTransaction());
    Set ids = new HashSet(mfids.getCreated());
    ids.addAll(mfids.getDeleted());
    ids.addAll(mfids.getModified());

    // grab the possibly modified revisions from mfids
    r1 = mfids.fromRevision;
    r2 = mfids.toRevision;

    // no changes?
    if (ids.isEmpty()) return new EmptyFeatureCollection(schema);

    // Create a filter that sounds like:
    // (revision > r1 and revision <= r2) or (expired > r1 and expired <= r2) and fid in
    // (fidlist)
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
    Filter fidFilter = store.buildFidFilter(ids);
    Filter transformedFidFilter = store.transformFidFilter(schema.getTypeName(), fidFilter);
    Filter revGrR1 = ff.greater(ff.property("revision"), ff.literal(r1.revision));
    Filter revLeR2 = ff.lessOrEqual(ff.property("revision"), ff.literal(r2.revision));
    Filter expGrR1 = ff.greater(ff.property("expired"), ff.literal(r1.revision));
    Filter expLeR2 = ff.lessOrEqual(ff.property("expired"), ff.literal(r2.revision));
    Filter versionFilter =
        ff.and(transformedFidFilter, ff.or(ff.and(revGrR1, revLeR2), ff.and(expGrR1, expLeR2)));

    // We just want the revision and expired, build a query against the real feature type
    DefaultQuery q =
        new DefaultQuery(schema.getTypeName(), versionFilter, new String[] {"revision", "expired"});
    FeatureReader<SimpleFeatureType, SimpleFeature> fr = null;
    SortedSet revisions = new TreeSet();
    try {
      fr = store.wrapped.getFeatureReader(q, getTransaction());
      while (fr.hasNext()) {
        SimpleFeature f = fr.next();
        Long revision = (Long) f.getAttribute(0);
        if (revision.longValue() > r1.revision) revisions.add(revision);
        Long expired = (Long) f.getAttribute(1);
        if (expired.longValue() != Long.MAX_VALUE && expired.longValue() > r1.revision)
          revisions.add(expired);
      }
    } catch (Exception e) {
      throw new DataSourceException("Error reading modified revisions from datastore", e);
    } finally {
      if (fr != null) fr.close();
    }

    // now, we have a list of revisions between a min and a max
    // let's try to build a fid filter with revisions from the biggest to the smallest
    Set revisionIdSet = new HashSet();
    for (Iterator it = revisions.iterator(); it.hasNext(); ) {
      Long rev = (Long) it.next();
      revisionIdSet.add(
          ff.featureId(VersionedPostgisDataStore.TBL_CHANGESETS + "." + rev.toString()));
    }
    if (revisionIdSet.isEmpty()) return new EmptyFeatureCollection(schema);
    Filter revisionFilter = ff.id(revisionIdSet);

    // return the changelog
    // TODO: sort on revision descending. Unfortunately, to do so we have to fix fid mappers,
    // so that auto-increment can return revision among the attributes, and at the same
    // time simply allow not including fid attributes in the insert queries (or provide a
    // "default" value for them).
    FeatureSource<SimpleFeatureType, SimpleFeature> changesets =
        (FeatureSource<SimpleFeatureType, SimpleFeature>)
            store.getFeatureSource(VersionedPostgisDataStore.TBL_CHANGESETS);
    DefaultQuery sq = new DefaultQuery();
    sq.setFilter(revisionFilter);
    final SortOrder order = swapped ? SortOrder.ASCENDING : SortOrder.DESCENDING;
    sq.setSortBy(new SortBy[] {ff.sort("revision", order)});
    if (maxRows > 0) sq.setMaxFeatures(maxRows);
    return changesets.getFeatures(sq);
  }
 public boolean hasNext() throws IOException {
   return delegate.hasNext();
 }
Example 26
  /**
   * Writing test that only engages against a remote geoserver.
   *
   * <p>Makes reference to the standard featureTypes that geoserver ships with. NOTE: Ignoring this
   * test for now because it edits topp:states and GeoServer doesn't return the correct Feature IDs
   * on transactions against shapefiles
   */
  @Test
  @Ignore
  public void testWrite()
      throws NoSuchElementException, IllegalFilterException, IOException,
          IllegalAttributeException {
    if (url == null) return;

    Map m = new HashMap();
    m.put(WFSDataStoreFactory.URL.key, url);
    m.put(WFSDataStoreFactory.TIMEOUT.key, Integer.valueOf(10000000));
    DataStore post = (WFS_1_0_0_DataStore) (new WFSDataStoreFactory()).createDataStore(m);
    String typename = TO_EDIT_TYPE;
    SimpleFeatureType ft = post.getSchema(typename);
    SimpleFeatureSource fs = post.getFeatureSource(typename);
    class Watcher implements FeatureListener {
      public int count = 0;

      public void changed(FeatureEvent featureEvent) {
        System.out.println("Event " + featureEvent);
        count++;
      }
    }
    Watcher watcher = new Watcher();
    fs.addFeatureListener(watcher);

    Id startingFeatures = createFidFilter(fs);
    FilterFactory2 filterFac = CommonFactoryFinder.getFilterFactory2(GeoTools.getDefaultHints());
    try {
      GeometryFactory gf = new GeometryFactory();
      MultiPolygon mp =
          gf.createMultiPolygon(
              new Polygon[] {
                gf.createPolygon(
                    gf.createLinearRing(
                        new Coordinate[] {
                          new Coordinate(-88.071564, 37.51099),
                          new Coordinate(-88.467644, 37.400757),
                          new Coordinate(-90.638329, 42.509361),
                          new Coordinate(-89.834618, 42.50346),
                          new Coordinate(-88.071564, 37.51099)
                        }),
                    new LinearRing[] {})
              });
      mp.setUserData("http://www.opengis.net/gml/srs/epsg.xml#" + EPSG_CODE);

      PropertyName geometryAttributeExpression =
          filterFac.property(ft.getGeometryDescriptor().getLocalName());
      PropertyIsNull geomNullCheck = filterFac.isNull(geometryAttributeExpression);
      Query query = new Query(typename, filterFac.not(geomNullCheck), 1, Query.ALL_NAMES, null);
      SimpleFeatureIterator inStore = fs.getFeatures(query).features();

      SimpleFeature f, f2;
      try {
        SimpleFeature feature = inStore.next();

        SimpleFeature copy = SimpleFeatureBuilder.deep(feature);
        SimpleFeature copy2 = SimpleFeatureBuilder.deep(feature);

        f = SimpleFeatureBuilder.build(ft, copy.getAttributes(), null);
        f2 = SimpleFeatureBuilder.build(ft, copy2.getAttributes(), null);
        assertFalse("Max Feature failed", inStore.hasNext());
      } finally {
        inStore.close();
      }

      org.geotools.util.logging.Logging.getLogger("org.geotools.data.wfs").setLevel(Level.FINE);
      SimpleFeatureCollection inserts = DataUtilities.collection(new SimpleFeature[] {f, f2});
      Id fp = WFSDataStoreWriteOnlineTest.doInsert(post, ft, inserts);

      // okay, now count...
      FeatureReader<SimpleFeatureType, SimpleFeature> count =
          post.getFeatureReader(new Query(ft.getTypeName()), Transaction.AUTO_COMMIT);
      int i = 0;
      while (count.hasNext() && i < 3) {
        f = count.next();
        i++;
      }
      count.close();

      WFSDataStoreWriteOnlineTest.doDelete(post, ft, fp);
      WFSDataStoreWriteOnlineTest.doUpdate(post, ft, ATTRIBUTE_TO_EDIT, NEW_EDIT_VALUE);
      // assertFalse("events not fired", watcher.count == 0);
    } finally {
      try {
        ((SimpleFeatureStore) fs).removeFeatures(filterFac.not(startingFeatures));
      } catch (Exception e) {
        System.out.println(e);
      }
    }
  }
 public SimpleFeature next()
     throws IOException, IllegalAttributeException, NoSuchElementException {
   return RetypingFeatureCollection.retype(delegate.next(), builder);
 }
 public void close() throws IOException {
   delegate.close();
   delegate = null;
   builder = null;
 }
  public void rollback(String toVersion, Filter filter, String[] userIds) throws IOException {
    // TODO: build an optimized version of this that can do the same work with a couple
    // of queries assuming the filter is fully encodable

    Transaction t = getTransaction();
    boolean autoCommit = false;
    if (Transaction.AUTO_COMMIT.equals(t)) {
      t = new DefaultTransaction();
      autoCommit = true;
    }

    // Gather features modified after toVersion
    ModifiedFeatureIds mfids =
        store.getModifiedFeatureFIDs(schema.getTypeName(), toVersion, null, filter, userIds, t);
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);

    // grab the state, we need to mark as dirty all the features we are going to modify/re-insert
    VersionedJdbcTransactionState state = store.wrapped.getVersionedJdbcTransactionState(t);

    // remove all features that have been created and not deleted
    Set fidsToRemove = new HashSet(mfids.getCreated());
    fidsToRemove.removeAll(mfids.getDeleted());
    if (!fidsToRemove.isEmpty()) {
      removeFeatures(store.buildFidFilter(fidsToRemove));
      state.setTypeNameDirty(getSchema().getTypeName());
    }

    // reinstate all features that were there before toVersion and that
    // have been deleted after it. Notice this is an insertion, so to preserve
    // the fids I have to use low level writers where I can set all attributes manually
    // (we work on the assumption the wrapped data store maps all attributes of the primary
    // key in the feature itself)
    Set fidsToRecreate = new HashSet(mfids.getDeleted());
    fidsToRecreate.removeAll(mfids.getCreated());
    if (!fidsToRecreate.isEmpty()) {
      state.setTypeNameDirty(getSchema().getTypeName());
      state.setFidsDirty(getSchema().getTypeName(), fidsToRecreate);

      long revision = store.wrapped.getVersionedJdbcTransactionState(t).getRevision();
      Filter recreateFilter =
          store.buildVersionedFilter(
              schema.getTypeName(), store.buildFidFilter(fidsToRecreate), mfids.fromRevision);
      FeatureReader<SimpleFeatureType, SimpleFeature> fr = null;
      FeatureWriter<SimpleFeatureType, SimpleFeature> fw = null;
      try {
        DefaultQuery q = new DefaultQuery(schema.getTypeName(), recreateFilter);
        fr = store.wrapped.getFeatureReader(q, t);
        fw = store.wrapped.getFeatureWriterAppend(schema.getTypeName(), t);
        while (fr.hasNext()) {
          SimpleFeature original = fr.next();
          SimpleFeature restored = fw.next();
          for (int i = 0; i < original.getFeatureType().getAttributeCount(); i++) {
            restored.setAttribute(i, original.getAttribute(i));
          }
          restored.setAttribute("revision", new Long(revision));
          restored.setAttribute("expired", new Long(Long.MAX_VALUE));
          fw.write();
        }
      } catch (IllegalAttributeException iae) {
        throw new DataSourceException(
            "Unexpected error occurred while restoring deleted features", iae);
      } finally {
        if (fr != null) fr.close();
        if (fw != null) fw.close();
      }
    }

    // Now onto the modified features, those that were there and still are there.
    // Since we cannot get a sorted writer we have to do a kind of inner loop scan
    // (note, a parallel scan of similarly sorted reader and writer would be more
    // efficient, but writer sorting is not available...)
    // Here it's possible to work against the external API, though it would be more
    // efficient (but more complex) to work against the wrapped one.
    if (!mfids.getModified().isEmpty()) {
      state.setTypeNameDirty(getSchema().getTypeName());
      state.setFidsDirty(getSchema().getTypeName(), mfids.getModified());

      Filter modifiedIdFilter = store.buildFidFilter(mfids.getModified());
      Filter mifCurrent =
          store.buildVersionedFilter(schema.getTypeName(), modifiedIdFilter, new RevisionInfo());
      FeatureReader<SimpleFeatureType, SimpleFeature> fr = null;
      FeatureWriter<SimpleFeatureType, SimpleFeature> fw = null;
      try {
        fw = store.getFeatureWriter(schema.getTypeName(), mifCurrent, t);
        while (fw.hasNext()) {
          SimpleFeature current = fw.next();
          Filter currIdFilter = ff.id(Collections.singleton(ff.featureId(current.getID())));
          Filter cidToVersion =
              store.buildVersionedFilter(schema.getTypeName(), currIdFilter, mfids.fromRevision);
          DefaultQuery q = new DefaultQuery(schema.getTypeName(), cidToVersion);
          q.setVersion(mfids.fromRevision.toString());
          fr = store.getFeatureReader(q, t);
          SimpleFeature original = fr.next();
          for (int i = 0; i < original.getFeatureType().getAttributeCount(); i++) {
            current.setAttribute(i, original.getAttribute(i));
          }
          fr.close();
          fw.write();
        }
      } catch (IllegalAttributeException iae) {
        throw new DataSourceException(
            "Unexpected error occurred while restoring modified features", iae);
      } finally {
        if (fr != null) fr.close();
        if (fw != null) fw.close();
      }
    }

    // if it's auto commit, don't forget to actually commit
    if (autoCommit) {
      t.commit();
      t.close();
    }
  }