@Test
  public void testAttributeReaderIndexed() throws IOException {
    URL u = TestData.url(TestCaseSupport.class, SHPFILE);
    File shpFile = DataUtilities.urlToFile(u);

    // open the test shapefile as an indexed data store
    ShapefileDataStore indexedstore = new ShapefileDataStore(shpFile.toURI().toURL());

    // grab an arbitrary feature id from the store
    SimpleFeatureIterator it = indexedstore.getFeatureSource().getFeatures().features();
    FeatureId fid = it.next().getIdentifier();
    it.close();

    // query the datastore
    FilterFactory ff = CommonFactoryFinder.getFilterFactory(null);
    Filter idFilter = ff.id(Collections.singleton(fid));
    final Query query =
        new Query(
            indexedstore.getSchema().getName().getLocalPart(), idFilter, new String[] {testColumn});
    final SimpleFeatureCollection indexedfeatures =
        indexedstore.getFeatureSource().getFeatures(query);

    // fetch the single feature matched by the id filter
    SimpleFeatureIterator indexIterator = indexedfeatures.features();
    SimpleFeature indexedFeature = indexIterator.next();
    indexIterator.close();

    // get the value of the duplicate column & compare it against expectation
    assertEquals(expectedValue, indexedFeature.getAttribute(testColumn));

    // cleanup
    indexedstore.dispose();
  }

  @Test
  public void testAttributeReader() throws IOException {
    URL u = TestData.url(TestCaseSupport.class, SHPFILE);
    File shpFile = DataUtilities.urlToFile(u);

    // open the test shapefile
    ShapefileDataStore store = new ShapefileDataStore(shpFile.toURI().toURL());
    SimpleFeatureSource source = store.getFeatureSource();

    // read the first feature
    SimpleFeatureIterator iter = source.getFeatures().features();
    SimpleFeature feature = iter.next();
    iter.close();

    // get the value of the duplicate column & compare it against expectation
    assertEquals(expectedValue, feature.getAttribute(testColumn));

    // cleanup
    store.dispose();
  }

  @Override
  protected FeatureWriter<SimpleFeatureType, SimpleFeature> getWriterInternal(
      Query query, int flags) throws IOException {
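    // at least one writer flag (such as WRITER_ADD) must be requested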
    if (flags == 0) {
      throw new IllegalArgumentException("no write flags set");
    }

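    // every write path is layered on a reader that scans the full shapefile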
    ShapefileFeatureReader reader = (ShapefileFeatureReader) delegate.getReaderInternal(Query.ALL);
    FeatureWriter<SimpleFeatureType, SimpleFeature> writer;
    ShapefileDataStore ds = getDataStore();
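    // prefer the indexed writer when a fid index is available so the index is kept in sync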
    if (ds.indexManager.hasFidIndex(false)
        || (ds.isFidIndexed() && ds.indexManager.hasFidIndex(true))) {
      writer =
          new IndexedShapefileFeatureWriter(
              ds.indexManager, reader, ds.getCharset(), ds.getTimeZone());
    } else {
      writer =
          new ShapefileFeatureWriter(delegate.shpFiles, reader, ds.getCharset(), ds.getTimeZone());
    }

    // if we only have to add (WRITER_ADD is the only flag set), move to the end of the file
    // TODO: just make the code transfer the bytes in bulk instead and start actual writing at
    // the end
    if ((flags | WRITER_ADD) == WRITER_ADD) {
      while (writer.hasNext()) {
        writer.next();
      }
    }

    // if we are filtering, wrap the writer so that it returns only the selected features
    // but still writes them all down
    Filter filter = query.getFilter();
    if (filter != null && !Filter.INCLUDE.equals(filter)) {
      writer = new FilteringFeatureWriter(writer, filter);
    }

    return writer;
  }