Example 1
  /**
   * Stages the object addressed by {@code pathFilter}, or all unstaged objects if {@code
   * pathFilter} is {@code null}, provided they are marked as unstaged changes. Does nothing
   * otherwise.
   *
   * <p>To stage changes not yet staged, a diff tree walk is performed using the current staged
   * {@link RevTree} as the old object and the current unstaged {@link RevTree} as the new object.
   * Then all the differences are traversed and the staged tree is updated with the changes reported
   * by the diff walk (neat).
   *
   * @param progress the progress listener for this process
   * @param pathFilter the path filter to stage, or {@code null} to stage all unstaged changes
   */
  public void stage(final ProgressListener progress, final @Nullable String pathFilter) {

    // shortcut for the case where the index is empty and we're staging all changes in the
    // working tree: just update the index ref to the working tree's RevTree id
    if (null == pathFilter
        && !index().getStaged(null).hasNext()
        && !updateOnly
        && index().countConflicted(null) == 0) {
      progress.started();
      Optional<ObjectId> workHead = command(RevParse.class).setRefSpec(Ref.WORK_HEAD).call();
      if (workHead.isPresent()) {
        command(UpdateRef.class).setName(Ref.STAGE_HEAD).setNewValue(workHead.get()).call();
      }
      progress.setProgress(100f);
      progress.complete();
      return;
    }

    final long numChanges = workingTree().countUnstaged(pathFilter).count();

    Iterator<DiffEntry> unstaged = workingTree().getUnstaged(pathFilter);

    if (updateOnly) {
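      // update-only mode: keep entries whose old object exists (modifications and deletions of
      // already tracked objects) and skip newly added ones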
      unstaged =
          Iterators.filter(
              unstaged,
              new Predicate<DiffEntry>() {
                @Override
                public boolean apply(@Nullable DiffEntry input) {
                  // HACK: avoid reporting changed trees
                  if (input.isChange() && input.getOldObject().getType().equals(TYPE.TREE)) {
                    return false;
                  }
                  return input.getOldObject() != null;
                }
              });
    }

    index().stage(progress, unstaged, numChanges);

    List<Conflict> conflicts = index().getConflicted(pathFilter);
    ConflictsDatabase conflictsDatabase = conflictsDatabase();
    for (Conflict conflict : conflicts) {
      // if we are staging unmerged files, the conflict should be resolved. However, if the
      // working tree object is the same as the staging area one (for instance, after running
      // checkout --ours), it will not be reported by the getUnstaged method. We resolve that
      // case here.
      conflictsDatabase.removeConflict(null, conflict.getPath());
    }
  }
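
The Javadoc above describes two paths: a fast path that simply points STAGE_HEAD at the current WORK_HEAD when there is no path filter, the index is empty, and there are no conflicts, and a general path that stages the diff between the staged and unstaged trees. A minimal usage sketch follows; the addOp handle exposing stage(...) and the DefaultProgressListener no-op listener are illustrative assumptions, not taken from the excerpt:

  // Hypothetical caller; names are illustrative only.
  ProgressListener listener = new DefaultProgressListener(); // assumed no-op listener
  addOp.stage(listener, "roads/road_1"); // stage one path, if it has an unstaged change
  addOp.stage(listener, null);           // null filter: stage every unstaged change
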
Example 2
  /**
   * Executes the export operation using the parameters that have been specified.
   *
   * @return the target {@link SimpleFeatureStore} containing the exported features
   */
  @Override
  protected SimpleFeatureStore _call() {
    final ObjectDatabase database = objectDatabase();
    if (filterFeatureTypeId != null) {
      RevObject filterType = database.getIfPresent(filterFeatureTypeId);
      checkArgument(
          filterType instanceof RevFeatureType, "Provided filter feature type does not exist");
    }

    final SimpleFeatureStore targetStore = getTargetStore();

    final String refspec = resolveRefSpec();
    final String treePath = refspec.substring(refspec.indexOf(':') + 1);
    final RevTree rootTree = resolveRootTree(refspec);
    final NodeRef typeTreeRef = resolTypeTreeRef(refspec, treePath, rootTree);

    final ObjectId defaultMetadataId = typeTreeRef.getMetadataId();

    final RevTree typeTree = database.getTree(typeTreeRef.getObjectId());

    final ProgressListener progressListener = getProgressListener();

    progressListener.started();
    progressListener.setDescription(
        "Exporting from " + path + " to " + targetStore.getName().getLocalPart() + "... ");

    final Iterator<SimpleFeature> filtered;
    {
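      // Export pipeline: read raw features from the type tree, adapt them to this op's
      // arguments, run the user-supplied transform, and drop features the transform discarded.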
      final Iterator<SimpleFeature> plainFeatures =
          getFeatures(typeTree, database, defaultMetadataId, progressListener);

      Iterator<SimpleFeature> adaptedFeatures = adaptToArguments(plainFeatures, defaultMetadataId);

      Iterator<Optional<Feature>> transformed =
          Iterators.transform(adaptedFeatures, ExportOp.this.function);

      Iterator<SimpleFeature> result =
          Iterators.filter(
              Iterators.transform(
                  transformed,
                  new Function<Optional<Feature>, SimpleFeature>() {
                    @Override
                    public SimpleFeature apply(Optional<Feature> input) {
                      return (SimpleFeature) input.orNull();
                    }
                  }),
              Predicates.notNull());

      // check that the exported features share at least one attribute with the target schema
      PeekingIterator<SimpleFeature> peekingIt = Iterators.peekingIterator(result);
      if (peekingIt.hasNext()) {
        Function<AttributeDescriptor, String> toString =
            new Function<AttributeDescriptor, String>() {
              @Override
              public String apply(AttributeDescriptor input) {
                return input.getLocalName();
              }
            };
        SimpleFeature peek = peekingIt.peek();
        Set<String> sourceAtts =
            new HashSet<String>(
                Lists.transform(peek.getFeatureType().getAttributeDescriptors(), toString));
        Set<String> targetAtts =
            new HashSet<String>(
                Lists.transform(targetStore.getSchema().getAttributeDescriptors(), toString));
        if (Sets.intersection(sourceAtts, targetAtts).isEmpty()) {
          throw new GeoToolsOpException(
              StatusCode.UNABLE_TO_ADD,
              "No common attributes between source and target feature types");
        }
      }

      filtered = peekingIt;
    }
    FeatureCollection<SimpleFeatureType, SimpleFeature> asFeatureCollection =
        new BaseFeatureCollection<SimpleFeatureType, SimpleFeature>() {

          @Override
          public FeatureIterator<SimpleFeature> features() {

            return new DelegateFeatureIterator<SimpleFeature>(filtered);
          }
        };

    // add the feature collection to the feature store
    final Transaction transaction;
    if (transactional) {
      transaction = new DefaultTransaction("create");
    } else {
      transaction = Transaction.AUTO_COMMIT;
    }
    try {
      targetStore.setTransaction(transaction);
      try {
        targetStore.addFeatures(asFeatureCollection);
        transaction.commit();
      } catch (final Exception e) {
        if (transactional) {
          transaction.rollback();
        }
        Throwables.propagateIfInstanceOf(e, GeoToolsOpException.class);
        throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
      } finally {
        transaction.close();
      }
    } catch (IOException e) {
      throw new GeoToolsOpException(e, StatusCode.UNABLE_TO_ADD);
    }

    progressListener.complete();

    return targetStore;
  }
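
As with the first example, a short usage sketch may help tie the steps together. The geogig command facade and the setter names below are assumptions inferred from the fields _call() reads (the path, the target store, the transactional flag); they are not shown in the excerpt:

  // Hypothetical invocation; setter names are assumed, not confirmed by the excerpt.
  SimpleFeatureStore exported =
      geogig.command(ExportOp.class)
          .setPath("roads")             // tree to export (assumed setter)
          .setFeatureStore(targetStore) // target GeoTools store (assumed setter)
          .setTransactional(true)       // commit through a DefaultTransaction
          .call();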