Exemple #1
0
  /**
   * Verifies that {@code Map.values().retainAll} either removes every value not
   * in the argument (when removal is supported) or throws
   * {@link UnsupportedOperationException} (when it is not).
   */
  public void testValuesRetainAll() {
    final Map<K, V> map;
    try {
      map = makePopulatedMap();
    } catch (UnsupportedOperationException e) {
      // Fixture cannot produce a populated map; nothing to test.
      return;
    }

    Collection<V> values = map.values();
    Set<V> retained = singleton(values.iterator().next());
    if (!supportsRemove) {
      try {
        values.retainAll(retained);
        fail("Expected UnsupportedOperationException.");
      } catch (UnsupportedOperationException expected) {
        // Expected.
      }
    } else {
      values.retainAll(retained);
      // The surviving values and the retained set must be equal as sets.
      for (V value : retained) {
        assertTrue(values.contains(value));
      }
      for (V value : values) {
        assertTrue(retained.contains(value));
      }
    }
    assertInvariants(map);
  }
Exemple #2
0
  /**
   * Verifies that calling {@code retainAll(null)} on the value view of an empty
   * map either throws (NullPointerException, or UnsupportedOperationException
   * when removal is unsupported) or, as tolerated by some JDK collections
   * (Sun bug 4802647), returns successfully.
   */
  public void testValuesRetainAllNullFromEmpty() {
    final Map<K, V> map;
    try {
      map = makeEmptyMap();
    } catch (UnsupportedOperationException e) {
      // Fixture cannot produce an empty map; nothing to test.
      return;
    }

    Collection<V> values = map.values();
    if (!supportsRemove) {
      try {
        values.retainAll(null);
        // We have to tolerate a successful return (Sun bug 4802647)
      } catch (UnsupportedOperationException e) {
        // Expected.
      } catch (NullPointerException e) {
        // Expected.
      }
    } else {
      try {
        values.retainAll(null);
        // Returning successfully is not ideal, but tolerated.
      } catch (NullPointerException e) {
        // Expected.
      }
    }
    assertInvariants(map);
  }
  /**
   * Returns the EnforcedRelations indexed under every key of candidateRoles,
   * every key of candidateType1 and every key of candidateType2 (an AND over
   * all three key sets).
   *
   * @return a fresh mutable collection holding the intersection, or an
   *     immutable empty list when any key has no entries or candidateRoles is
   *     empty.
   */
  public Collection<EnforcedRelation> matchRolesAndType1AndType2(
      TIntHashSet candidateRoles, TIntHashSet candidateType1, TIntHashSet candidateType2) {
    if (candidateRoles.isEmpty()) {
      // Guard: iterator().next() on an empty set would throw (original bug).
      return Collections.emptyList();
    }
    TIntIterator roleKeyI = candidateRoles.iterator();

    // Single lookup for the first role key (the original queried the index twice).
    Collection<EnforcedRelation> firstMatches = rolesIndex.get(roleKeyI.next());
    if (firstMatches == null || firstMatches.isEmpty()) {
      return Collections.emptyList();
    }
    Collection<EnforcedRelation> result = new ArrayList<EnforcedRelation>(firstMatches);

    // Intersect with the remaining role keys, then all type1 and type2 keys.
    result = retainMatchesForKeys(result, roleKeyI, rolesIndex);
    if (result == null) return Collections.emptyList();
    result = retainMatchesForKeys(result, candidateType1.iterator(), type1Index);
    if (result == null) return Collections.emptyList();
    result = retainMatchesForKeys(result, candidateType2.iterator(), type2Index);
    if (result == null) return Collections.emptyList();
    return result;
  }

  /**
   * Intersects {@code result} with the index entries of each remaining key.
   *
   * @return the (possibly empty) intersection, or null when a key has no index
   *     entry at all (meaning the overall intersection is empty).
   */
  private Collection<EnforcedRelation> retainMatchesForKeys(
      Collection<EnforcedRelation> result,
      TIntIterator keys,
      TIntObjectHashMap<Collection<EnforcedRelation>> index) {
    while (keys.hasNext()) {
      int key = keys.next();
      if (result.isEmpty()) {
        return result; // cannot shrink any further
      }
      Collection<EnforcedRelation> rest = index.get(key);
      if (rest == null) {
        return null; // unknown key -> empty intersection
      }
      result.retainAll(rest);
    }
    return result;
  }
  /**
   * Exercises add/remove/retain/iterator-remove behaviour of the feature
   * collection and checks that the backing {@code features} view stays
   * consistent after every mutation.
   * NOTE(review): assumes {@code randomPiece} returns a strict subset of
   * {@code features} — confirm, otherwise the containsAll(half) checks fail.
   */
  public void testAddRemoveAllAbilities() throws Exception {
    Collection half = randomPiece(features);
    Collection otherHalf = DataUtilities.list(features);

    if (features instanceof Collection) {
      Collection<SimpleFeature> collection = (Collection<SimpleFeature>) features;

      // otherHalf := features minus half; removing half must leave exactly otherHalf.
      otherHalf.removeAll(half);
      collection.removeAll(half);
      assertTrue(features.containsAll(otherHalf));
      assertTrue(!features.containsAll(half));
      // Removing the remainder empties the collection.
      collection.removeAll(otherHalf);
      assertTrue(features.size() == 0);
      collection.addAll(half);
      assertTrue(features.containsAll(half));
      collection.addAll(otherHalf);
      assertTrue(features.containsAll(otherHalf));
      // retainAll drops everything outside otherHalf, i.e. the elements of half.
      collection.retainAll(otherHalf);
      assertTrue(features.containsAll(otherHalf));
      assertTrue(!features.containsAll(half));
      collection.addAll(otherHalf);
      // Iterator removal must also empty the collection.
      Iterator<SimpleFeature> i = collection.iterator();
      while (i.hasNext()) {
        i.next();
        i.remove();
      }
      assertEquals(features.size(), 0);

      SimpleFeatureTypeBuilder tb = new SimpleFeatureTypeBuilder();
      tb.setName("XXX");
      SimpleFeatureBuilder b = new SimpleFeatureBuilder(tb.buildFeatureType());

      // Removing a feature that was never added must report false.
      assertTrue(!collection.remove(b.buildFeature(null)));
    }
  }
  /**
   * Fills {@code replacementNodeIsPreservedNode} so that, for every action and
   * replacement node, the entry holds the pattern-node number of that node when
   * it is preserved by the action, or -1 otherwise.
   */
  private void collectReplacementNodeIsPreservedNodeInfo() {
    replacementNodeIsPreservedNode = new int[n_graph_actions][max_n_replacement_nodes];

    // init the array with -1
    for (int i = 0; i < n_graph_actions; i++)
      for (int j = 0; j < max_n_replacement_nodes; j++) replacementNodeIsPreservedNode[i][j] = -1;

    // for all nodes preserved set the corresponding array entry to the
    // appropriate pattern node number
    for (Rule action : actionRuleMap.keySet()) {
      int act_id = actionRuleMap.get(action).intValue();

      if (action.getRight() != null) {
        // compute the set of replacement nodes preserved by this action
        // (preserved = present in both the replacement and the pattern graph)
        Collection<Node> replacement_nodes_preserved = new HashSet<Node>();
        replacement_nodes_preserved.addAll(action.getRight().getNodes());
        replacement_nodes_preserved.retainAll(action.getPattern().getNodes());

        // for all those preserved replacement nodes store the
        // corresponding pattern node
        for (Node node : replacement_nodes_preserved) {
          int node_num = replacement_node_num.get(act_id).get(node).intValue();
          replacementNodeIsPreservedNode[act_id][node_num] =
              pattern_node_num.get(act_id).get(node).intValue();
        }
      }
    }
  }
Exemple #6
0
  /**
   * returns docs with ALL given sentiments (ANDed). special cases: sentiments
   * can be an array of length 1 and be "None", in which case all documents with
   * no sentiments are returned; or "all", in which case all documents with any
   * sentiments are returned.
   *
   * @param sentiments sentiment names to AND together; null/empty returns null
   * @param captions lexicon captions forwarded to getEmotions
   * @return the matching docs intersected with the given doc set
   */
  public Collection<Document> getDocsWithSentiments(
      String sentiments[],
      Indexer indexer,
      Collection<Document> docs,
      int cluster,
      boolean originalContentOnly,
      String... captions) {
    // note: multiple sentiments are possible, they are ANDED
    if (sentiments == null || sentiments.length == 0) return null;

    Set<Document> docs_set = Util.castOrCloneAsSet(docs);
    if (sentiments.length == 1 && "all".equalsIgnoreCase(sentiments[0]))
      return getDocsWithAnyEmotions(indexer, docs_set, originalContentOnly);

    // note: we'll pass in null for docs, and intersect with the given set of docs later
    // otherwise we'd just be doing it again and again for each category and lexer
    Map<String, Collection<Document>> map =
        getEmotions(indexer, null, false, originalContentOnly, captions);
    Collection<Document> result = null;
    for (String sentiment : sentiments) {
      Collection<Document> matches =
          "None".equalsIgnoreCase(sentiment)
              ? getDocsWithNoEmotions(indexer, docs_set, originalContentOnly)
              : map.get(sentiment);
      if (matches == null) {
        // one term has no matches -> the AND of all terms is empty
        return new LinkedHashSet<Document>();
      }
      if (result == null) {
        // Copy instead of aliasing: the retainAll below must not mutate the
        // collection stored inside the emotions map (original bug).
        result = new LinkedHashSet<Document>(matches);
      } else {
        result.retainAll(matches);
      }
    }
    return Util.setIntersection(result, docs_set);
  }
 /**
  * Demonstrates the basic Collection API: add, remove, contains, containsAll,
  * removeAll, clear and retainAll.
  */
 public static void main(String[] args) {
   // Parameterized types instead of the raw Collection/ArrayList of the original.
   Collection<Object> c = new ArrayList<Object>();
   // Add an element.
   c.add("孙悟空");
   // Collections cannot hold primitives, but autoboxing wraps 6 in an Integer.
   c.add(6);
   System.out.println("c集合的元素个数为:" + c.size());
   // Remove the specified element.
   c.remove(6);
   System.out.println("c集合的元素个数为:" + c.size());
   // Check whether the collection contains the given string.
   System.out.println("c集合的是否包含\"孙悟空\"字符串:" + c.contains("孙悟空"));
   c.add("轻量级Java EE企业应用实战");
   System.out.println("c集合的元素:" + c);
   Collection<Object> books = new HashSet<Object>();
   books.add("轻量级Java EE企业应用实战");
   books.add("疯狂Java讲义");
   System.out.println("c集合是否完全包含books集合?" + c.containsAll(books));
   // Subtract the elements of books from c.
   c.removeAll(books);
   System.out.println("c集合的元素:" + c);
   // Remove every element of c.
   c.clear();
   System.out.println("c集合的元素:" + c);
   // Keep in books only the elements that c also contains.
   books.retainAll(c);
   System.out.println("books集合的元素:" + books);
 }
 /**
  * Calculates the common parents of two nodes.
  *
  * @param nodeA A child.
  * @param nodeB A child.
  * @return The common parents of the two children, restricted to ptChildren_.
  */
 private Collection<DAGNode> getCommonParents(Node nodeA, Node nodeB) {
   Collection<DAGNode> parents = new ArrayList<>();
   // Query the DAG for the common generalisations of the two nodes.
   for (Node candidate : CommonQuery.COMMONGENLS.runQuery(dag_, nodeA, nodeB)) {
     parents.add((DAGNode) candidate);
   }
   // Keep only the candidates that are also in the tracked ptChildren_ set.
   parents.retainAll(ptChildren_);
   return parents;
 }
  /**
   * Fills {@code patternNodeIsToBeKept} so that, for every action and pattern
   * node, the entry holds the replacement-node number of that node when it is
   * kept by the action, or -1 otherwise.
   */
  private void collectPatternNodesToBeKeptInfo() {
    patternNodeIsToBeKept = new int[n_graph_actions][max_n_pattern_nodes];

    // init the arrays with -1
    for (int i = 0; i < n_graph_actions; i++)
      for (int j = 0; j < max_n_pattern_nodes; j++) patternNodeIsToBeKept[i][j] = -1;

    // for all nodes to be kept set the corresponding array entry to the
    // appropriate replacement node number
    for (Rule action : actionRuleMap.keySet()) {
      int act_id = actionRuleMap.get(action).intValue();

      // compute the set of pattern nodes to be kept for this action
      // (kept = present in both the pattern and the replacement graph)
      Collection<Node> pattern_nodes_to_keep = new HashSet<Node>();
      pattern_nodes_to_keep.addAll(action.getPattern().getNodes());
      if (action.getRight() != null) {
        Graph replacement = action.getRight();
        pattern_nodes_to_keep.retainAll(replacement.getNodes());
        // iterate over the pattern nodes to be kept and store their
        // corresponding replacement node number
        for (Node node : pattern_nodes_to_keep) {
          int node_num = pattern_node_num.get(act_id).get(node).intValue();
          patternNodeIsToBeKept[act_id][node_num] =
              replacement_node_num.get(act_id).get(node).intValue();
        }
      }
    }
  }
  /**
   * Intersects the relation collections stored in {@code index} under every
   * key of {@code subset}.
   *
   * @return a fresh mutable collection holding the intersection, or an
   *     immutable empty list when the subset is empty or any key is unknown.
   */
  protected Collection<EnforcedRelation> match(
      TIntHashSet subset, TIntObjectHashMap<Collection<EnforcedRelation>> index) {
    if (subset.isEmpty()) {
      // Guard: iterator().next() on an empty set would throw (original bug).
      return Collections.emptyList();
    }
    TIntIterator keyI = subset.iterator();

    // Single lookup for the first key (the original queried the index twice).
    Collection<EnforcedRelation> firstMatches = index.get(keyI.next());
    if (firstMatches == null || firstMatches.isEmpty()) {
      return Collections.emptyList();
    }
    Collection<EnforcedRelation> result = new ArrayList<EnforcedRelation>(firstMatches);

    while (keyI.hasNext()) {
      int key = keyI.next();

      if (result.isEmpty()) return result; // cannot shrink any further

      Collection<EnforcedRelation> rest = index.get(key);
      if (rest == null) {
        return Collections.emptyList();
      }
      result.retainAll(rest);
    }

    return result;
  }
  /**
   * Checks that the element named by the attribute value is declared in at
   * least one of the referred files; otherwise reports a MissingCssElement
   * hint.
   */
  private void processElements(
      Attribute attribute,
      CssElementType elementType,
      Map<String, Collection<FileObject>> elements2files) {
    // Nothing to check for a missing or empty attribute value.
    CharSequence attributeValue = attribute.unquotedValue();
    if (attributeValue == null || attributeValue.length() == 0) {
      return;
    }

    // All files containing the element declaration.
    Collection<FileObject> declaringFiles = elements2files.get(attributeValue.toString());

    // Narrow the declaring files down to those actually referred from here.
    Collection<FileObject> referredDeclaringFiles = new LinkedList<FileObject>();
    if (declaringFiles != null) {
      referredDeclaringFiles.addAll(declaringFiles);
      referredDeclaringFiles.retainAll(referredFiles);
    }

    if (referredDeclaringFiles.isEmpty()) {
      // The element is not declared in any referred file -> report it.
      hints.add(
          new MissingCssElement(
              rule, context, getAttributeValueOffsetRange(attribute, context), declaringFiles));
    }
  }
  /**
   * Runs the validation rules of the given group against every source for each
   * period between the two dates.
   *
   * @return the set of validation violations found.
   */
  public Collection<ValidationResult> validate(
      Date startDate,
      Date endDate,
      Collection<OrganisationUnit> sources,
      ValidationRuleGroup group) {
    Map<String, Double> constantMap = constantService.getConstantMap();

    Collection<ValidationResult> validationViolations = new HashSet<ValidationResult>();

    // Periods are the same for every source; compute them once up front.
    Collection<Period> relevantPeriods = periodService.getPeriodsBetweenDates(startDate, endDate);

    for (OrganisationUnit source : sources) {
      // Rules relevant to this source, restricted to the requested group.
      Collection<ValidationRule> relevantRules =
          getRelevantValidationRules(source.getDataElementsInDataSets());
      relevantRules.retainAll(group.getMembers());

      if (!relevantRules.isEmpty()) {
        // Hoisted behind the empty check: no need to resolve data elements
        // for sources without applicable rules (the original always did).
        Set<DataElement> dataElements = getDataElementsInValidationRules(relevantRules);

        for (Period period : relevantPeriods) {
          validationViolations.addAll(
              validateInternal(
                  period,
                  source,
                  relevantRules,
                  dataElements,
                  constantMap,
                  validationViolations.size()));
        }
      }
    }

    return validationViolations;
  }
 /**
  * Filters the SASL mechanism names in place: in allow mode only the
  * configured names survive, otherwise the configured names are removed.
  * The filtered collection is then passed up the chain.
  */
 void filterSaslMechanisms(final Collection<String> names) {
   if (allow) {
     names.retainAll(this.names);
   } else {
     names.removeAll(this.names);
   }
   super.filterSaslMechanisms(names);
 }
Exemple #14
0
 /**
  * Copy-on-write retainAll: the backing set is cloned exactly once, on the
  * first mutating call, so the original delegate set is never modified.
  * NOTE(review): the copyDone check-then-act is not thread-safe — confirm this
  * collection is only mutated from a single thread.
  */
 @Override
 public boolean retainAll(Collection<?> arg0) {
   if (!copyDone) {
     copyDone = true;
     delegate = new HashSet<T>(delegate);
   }
   return delegate.retainAll(arg0);
 }
Exemple #15
0
  /**
   * Initializes every edge switch to the not-on-source state, then restricts
   * the partition space to partitions that have forwarding rules.
   *
   * @param partitionSpace mutated in place by the retainAll below.
   * @throws Switch.InfeasibleStateException propagated from the helper init.
   */
  protected void setInitialState(Collection<Partition> partitionSpace)
      throws Switch.InfeasibleStateException {
    final List<Switch> edges = topology.findEdges();
    for (Switch edge : edges) {
      topology.getHelper(edge).initToNotOnSrc(edge, sourcePartitions.get(edge), true);
    }

    // Keep only the partitions that actually have forwarding rules.
    partitionSpace.retainAll(forwardingRules.keySet());
  }
    /**
     * Recomputes the inferred "direct" statements until no further update is
     * needed: the currently inferred statements are diffed against freshly
     * evaluated queries, and only the actual difference is removed/added.
     */
    @Override
    public void flushUpdates() throws SailException {
      super.flushUpdates();

      while (updateNeeded) {
        try {
          // Determine which statements should be added and which should be
          // removed
          Collection<Statement> oldStatements = new HashSet<Statement>(256);
          Collection<Statement> newStatements = new HashSet<Statement>(256);

          evaluateIntoStatements(DIRECT_SUBCLASSOF_MATCHER, oldStatements);
          evaluateIntoStatements(DIRECT_SUBPROPERTYOF_MATCHER, oldStatements);
          evaluateIntoStatements(DIRECT_TYPE_MATCHER, oldStatements);

          evaluateIntoStatements(DIRECT_SUBCLASSOF_QUERY, newStatements);
          evaluateIntoStatements(DIRECT_SUBPROPERTYOF_QUERY, newStatements);
          evaluateIntoStatements(DIRECT_TYPE_QUERY, newStatements);

          logger.debug("existing virtual properties: {}", oldStatements.size());
          logger.debug("new virtual properties: {}", newStatements.size());

          // Remove the statements that should be retained from both sets
          // (their intersection), leaving only the actual delta in old/new.
          Collection<Statement> unchangedStatements = new HashSet<Statement>(oldStatements);
          unchangedStatements.retainAll(newStatements);

          oldStatements.removeAll(unchangedStatements);
          newStatements.removeAll(unchangedStatements);

          logger.debug("virtual properties to remove: {}", oldStatements.size());
          logger.debug("virtual properties to add: {}", newStatements.size());

          Resource[] contexts = new Resource[] {null};

          for (Statement st : oldStatements) {
            removeInferredStatement(st.getSubject(), st.getPredicate(), st.getObject(), contexts);
          }

          for (Statement st : newStatements) {
            addInferredStatement(st.getSubject(), st.getPredicate(), st.getObject(), contexts);
          }

          updateNeeded = false;
        } catch (RDFHandlerException e) {
          // Unwrap: RDFHandlerException is only a carrier for the real cause.
          Throwable t = e.getCause();
          if (t instanceof SailException) {
            throw (SailException) t;
          } else {
            throw new SailException(t);
          }
        } catch (QueryEvaluationException e) {
          throw new SailException(e);
        }

        // The statements just added/removed may trigger further inferencing.
        super.flushUpdates();
      }
    }
 /**
  * retainAll on a GraphObject-backed list must be rejected with
  * UnsupportedOperationException.
  */
 @SmallTest
 @MediumTest
 @LargeTest
 public void testCollectionRetainAllThrows() throws JSONException {
   try {
     Collection<Integer> graphBackedList = GraphObject.Factory.createList(Integer.class);
     graphBackedList.retainAll(Arrays.asList());
     fail("Expected exception");
   } catch (UnsupportedOperationException expected) {
     // Success: the collection is unmodifiable via retainAll.
   }
 }
 /**
  * Returns the fields to highlight, restricted to the requested param fields
  * when both sets are non-empty.
  */
 @Override
 protected Collection<String> getFieldsToHighlight(StoredDocument document) {
   Collection<String> result = super.getFieldsToHighlight(document);
   // if stored is false, then result will be empty, in which case just get all the param fields
   if (paramFields.isEmpty() == false && result.isEmpty() == false) {
     // Restrict highlighting to the explicitly requested fields.
     result.retainAll(paramFields);
   } else {
     // NOTE(review): returns the paramFields field itself, not a copy — a
     // caller that mutates the result would corrupt it; confirm intended.
     result = paramFields;
   }
   return result;
 }
 /**
  * Returns the mappings common to every object in the collection, i.e. the
  * intersection of getMappings(o) over all elements; empty input yields an
  * empty set.
  */
 public Collection<? extends Mapping> getAllMappings(Collection<?> collection) {
   Iterator<?> it = collection.iterator();
   if (!it.hasNext()) {
     // No objects -> no common mappings.
     return Collections.emptySet();
   }
   Collection<Mapping> intersection = new ArrayList<Mapping>(getMappings(it.next()));
   // Keep intersecting until the input is exhausted or the result is empty.
   while (it.hasNext() && !intersection.isEmpty()) {
     intersection.retainAll(getMappings(it.next()));
   }
   return intersection;
 }
 /**
  * Retains only the elements of this wrapped collection that are in {@code c},
  * keeping the owning multimap's cached total size in sync with the delegate.
  */
 @Override
 public boolean retainAll(Collection<?> c) {
   // retainAll(null) must fail fast per the Collection contract.
   checkNotNull(c);
   int oldSize = size(); // calls refreshIfEmpty
   boolean changed = delegate.retainAll(c);
   if (changed) {
     int newSize = delegate.size();
     // Propagate the (non-positive) size delta to the multimap's total.
     totalSize += (newSize - oldSize);
     removeIfEmpty();
   }
   return changed;
 }
Exemple #21
0
 /**
  * Copy-on-write retainAll guarded by the write lock: the delegate is cloned
  * exactly once, on the first mutating call.
  */
 @Override
 public boolean retainAll(Collection<?> arg0) {
   // Acquire the lock *outside* try: if lock() itself threw, the finally
   // block would otherwise unlock a lock this thread does not hold.
   writeLock.lock();
   try {
     if (!copyDone.getAndSet(true)) {
       delegate = new SyncSet<T>(delegate);
     }
     return delegate.retainAll(arg0);
   } finally {
     writeLock.unlock();
   }
 }
Exemple #22
0
    /**
     * Merge all MergeData. All MergeData elements should be disjunct (both views and digests).
     * However, this method is prepared to resolve duplicate entries (for the same member).
     * Resolution strategy for views is to merge only 1 of the duplicate members. Resolution
     * strategy for digests is to take the higher seqnos for duplicate digests.
     *
     * <p>After merging all members into a Membership and subsequent sorting, the first member of
     * the sorted membership will be the new coordinator. This method has a lock on merge_rsps.
     *
     * @param merge_rsps A list of MergeData items. Elements with merge_rejected=true were removed
     *     before. Is guaranteed not to be null and to contain at least 1 member.
     * @return the consolidated MergeData, or null when no digest or no coordinator could be
     *     determined.
     */
    private MergeData consolidateMergeData(List<MergeData> merge_rsps) {
      long logical_time = 0; // for new_vid
      List<View> subgroups =
          new ArrayList<View>(11); // contains a list of Views, each View is a subgroup
      Collection<Collection<Address>> sub_mbrships = new ArrayList<Collection<Address>>();

      for (MergeData tmp_data : merge_rsps) {
        View tmp_view = tmp_data.getView();
        if (tmp_view != null) {
          ViewId tmp_vid = tmp_view.getVid();
          if (tmp_vid != null) {
            // compute the new view id (max of all vids +1)
            logical_time = Math.max(logical_time, tmp_vid.getId());
          }
          // merge all membership lists into one (prevent duplicates)
          sub_mbrships.add(new ArrayList<Address>(tmp_view.getMembers()));
          subgroups.add(tmp_view.copy());
        }
      }

      // determine the new digest
      Digest new_digest = consolidateDigests(merge_rsps, merge_rsps.size());
      if (new_digest == null) return null;

      // remove all members from the new member list that are not in the digest
      Collection<Address> digest_mbrs = new_digest.getMembers();
      for (Collection<Address> coll : sub_mbrships) coll.retainAll(digest_mbrs);

      List<Address> merged_mbrs = gms.computeNewMembership(sub_mbrships);

      // the new coordinator is the first member of the consolidated & sorted membership list
      Address new_coord = merged_mbrs.isEmpty() ? null : merged_mbrs.get(0);
      if (new_coord == null) return null;

      // should be the highest view ID seen up to now plus 1
      ViewId new_vid = new ViewId(new_coord, logical_time + 1);

      // determine the new view
      MergeView new_view = new MergeView(new_vid, merged_mbrs, subgroups);

      if (log.isTraceEnabled())
        log.trace(
            gms.local_addr
                + ": consolidated view="
                + new_view
                + "\nconsolidated digest="
                + new_digest);
      return new MergeData(gms.local_addr, new_view, new_digest);
    }
    /**
     * Tries to match unmatched sibling classes: for each direct superclass of
     * unmatchedCls that already has an image, the unmatched subclasses of the
     * superclass are compared by name with the unmatched subclasses of its
     * image.
     */
    private static void findMatch (Cls unmatchedCls, Collection unmatchedInO1, Collection unmatchedInO2) {
      Collection directSupers = Util.getDirectSuperclasses(unmatchedCls);
      for (Iterator supers = directSupers.iterator(); supers.hasNext(); ) {
        Cls superCls = (Cls) supers.next();
        // **** need to deal with multiple images here
        Cls superImage = (Cls) _results.getFirstImage(superCls);
        if (superImage == null) {
          continue;
        }
        Collection subs = new ArrayList(Util.getDirectSubclasses(superCls));
        Collection subsImages = AlgorithmUtils.getImages(subs, _results);
        Collection imageSubs = new ArrayList(Util.getDirectSubclasses(superImage));
        // imageSubs := unmatched subclasses of the image, minus images of subs
        imageSubs.removeAll(subsImages);
        imageSubs.retainAll(unmatchedInO2);
        // subs := unmatched subclasses of the superclass
        subs.retainAll(unmatchedInO1);

        if (!subs.isEmpty() && !imageSubs.isEmpty()) {
          compareNamesForSiblings(subs, imageSubs);
        }
        AlgorithmUtils.removeClsesWithSingleParent(subs, unmatchedInO1);
      }
    }
 /**
  * Removes the association between the given accompaniment and every food
  * that references it; each food keeps all of its other accompaniments.
  *
  * @param root the accompaniment being deleted.
  */
 private void deleteFoodAssociations(Accompaniment root) {
   List<Food> foods = foodDAO.listByAccompanimentId(root.getId());
   for (Food food : foods) {
     // Collect every accompaniment except root directly. (The original built
     // a "retain" list and then ran an O(n^2) retainAll for the same effect.)
     HashSet<Accompaniment> kept = new HashSet<Accompaniment>();
     for (Accompaniment accompaniment : accompanimentDAO.listByFoodId(food.getId())) {
       if (!accompaniment.getId().equals(root.getId())) {
         kept.add(accompaniment);
       }
     }
     food.setAccompaniments(kept);
     foodDAO.update(food);
   }
   root.setFoods(null);
 }
 /**
  * Retains in the backing collection only the elements contained in {@code c}
  * and fires a CONTENTS_CHANGED event when anything was removed.
  * Special case: retaining the inner collection itself (or this) is a no-op.
  */
 public boolean retainAll(Collection c) {
   if (_inner == c || this == c) { // special case
     return false;
   }
   // bug #1819318 Problem while using SortedSet with Databinding
   if (_map instanceof LinkedHashMap || _map instanceof SortedMap) {
     // Ordered backing map: delegate so ordering is handled in one place.
     return removePartial(_inner, c, false, false, true);
   } else { // bug #1839634 Problem while using HashSet with Databinding
     // Update the selection first, then mutate the backing collection.
     retainAllSelection(c);
     final boolean ret = _inner.retainAll(c);
     if (ret) {
       fireEvent(ListDataEvent.CONTENTS_CHANGED, -1, -1);
     }
     return ret;
   }
 }
  /**
   * Returns the most specific mapping in this tree that covers every object in
   * the collection, i.e. for each object at least one tree-path entry is among
   * the mapping's mapped objects. Falls back to this mapping itself.
   */
  public Mapping getParentMapping(Collection<?> collection) {
    // Barring a better result, this will be the result.
    Mapping result = this;

    // Cache the tree path for each object.
    final Collection<List<?>> allTreePaths = new ArrayList<List<?>>();
    for (Object object : collection) {
      allTreePaths.add(domain.getTreePath(object));
    }

    // Iterate over the mappings in the tree.
    OuterLoop:
    for (TreeIterator<Mapping> mappings = treeIterator(); mappings.hasNext(); ) {
      Mapping mapping = mappings.next();

      // Check to make sure that every object in the collection has an ancestor
      // that is contained in this mapping.
      for (List<?> treePath : allTreePaths) {
        // Non-destructive intersection test. The original called retainAll on
        // the collection returned by getMappedObjects(), mutating it; if that
        // collection is live mapping state this corrupted the mapping and the
        // identity check below.
        boolean hasAncestorInMapping = false;
        for (Object mapped : mapping.getMappedObjects()) {
          if (treePath.contains(mapped)) {
            hasAncestorInMapping = true;
            break;
          }
        }

        // If the intersection is empty, i.e., no ancestor is in the mapping...
        if (!hasAncestorInMapping) {
          // If this mapping isn't a parent, its children definitely won't be either.
          mappings.prune();
          continue OuterLoop;
        }
      }

      // Make sure the collections aren't identical...
      Collection<?> mappedObjects = mapping.getMappedObjects();
      if (!collection.containsAll(mappedObjects) || !mappedObjects.containsAll(collection)) {
        result = mapping;
      }
    }

    return result;
  }
 /**
  * Returns the workspaces tagged with ALL of the given tags (an AND over the
  * filter); an empty filter returns all workspaces.
  *
  * @param filter tag names, must not be null.
  * @return the matching workspaces, never null.
  */
 public Collection<Workspace> findByTags(String[] filter) {
   assert filter != null;
   if (filter.length == 0) {
     return workspaceRepository.findAll();
   }
   Collection<Workspace> result = null;
   for (String tag : filter) {
     Collection<Workspace> matches = workspaceRepository.findByTagName(tag);
     if (result == null) {
       // Copy: retainAll below must not mutate the repository's collection
       // (the original aliased it).
       result = new ArrayList<Workspace>(matches);
     } else {
       result.retainAll(matches);
     }
     if (result.isEmpty()) {
       // The intersection can only shrink. The original restarted from the
       // next tag here, wrongly returning that tag's full result set.
       break;
     }
   }
   assert result != null;
   return result;
 }
    /**
     * Computes the intersection of the deleted-data sets reported by every
     * remote MAP package.
     *
     * NOTE(review): reconstructed from broken decompiler output — the original
     * text reused one variable for unrelated types and declared an exception
     * variable as {@code Collection}, so it did not compile. The real exception
     * type is unknown; {@code Exception} is caught here — confirm against the
     * original source.
     *
     * @param remoteMapInfos a collection of {@code RemoteMapInfo} entries.
     * @return the intersection of all deleted data, or null when no package
     *     provided any.
     */
    private Collection calculatedDatabaseValuesToRemove(Collection remoteMapInfos) {
        Collection valuesToRemove = null;
        for (Iterator iterator = remoteMapInfos.iterator(); iterator.hasNext();) {
            RemoteMapInfo remoteMapInfo = (RemoteMapInfo) iterator.next();
            Collection deletedData = null;
            try {
                deletedData = new RemoteAmazonDataStorage(mContext, remoteMapInfo).getAllDeletedData();
            } catch (Exception e) {
                MAPLog.w(TAG, (new StringBuilder("Failed to get deleted data from ")).append(remoteMapInfo.getPackageName()).toString(), e);
            }
            if (deletedData == null) {
                MAPLog.w(TAG, String.format("Remote Package %s is unable to provide any deleted data", new Object[] {
                    remoteMapInfo.toString()
                }));
            } else if (valuesToRemove == null) {
                valuesToRemove = deletedData;
            } else {
                // AND across packages: keep only values every package deleted.
                valuesToRemove.retainAll(deletedData);
            }
        }

        // The decompiled source assembled this message but the log call was
        // lost; kept as a plain expression to avoid inventing a side effect.
        String description = (valuesToRemove != null) ? valuesToRemove.toString() : "None";
        new StringBuilder("Deleting Values: ").append(description).toString();
        return valuesToRemove;
    }
Exemple #29
0
  /**
   * Names of stages to sync: (ALL intersect SYNC) intersect (ALL minus SKIP).
   *
   * @param knownStageNames collection of known stage names (set ALL above).
   * @param toSync set SYNC above, or <code>null</code> to sync all known stages.
   * @param toSkip set SKIP above, or <code>null</code> to not skip any stages.
   * @return stage names.
   */
  public static Collection<String> getStagesToSync(
      final Collection<String> knownStageNames,
      Collection<String> toSync,
      Collection<String> toSkip) {
    // Defensive copies: the caller's collections are never mutated. A null
    // toSkip means "skip nothing"; a null toSync means "sync everything known".
    final HashSet<String> skip =
        (toSkip == null) ? new HashSet<String>() : new HashSet<String>(toSkip);
    final HashSet<String> sync =
        (toSync == null) ? new HashSet<String>(knownStageNames) : new HashSet<String>(toSync);

    // (ALL intersect SYNC) minus SKIP.
    sync.retainAll(knownStageNames);
    sync.removeAll(skip);
    return sync;
  }
Exemple #30
0
 /**
  * Retains in the backing collection only the elements of {@code c},
  * rebuilding the selection to cover the retained elements and firing a
  * CONTENTS_CHANGED event on any removal. Special case: retaining the backing
  * collection itself (or this) is a no-op.
  */
 public boolean retainAll(Collection<?> c) {
   if (_col == c || this == c) { // special case
     return false;
   }
   // bug #1819318 Problem while using SortedSet with Databinding
   if (_map instanceof LinkedHashMap || _map instanceof SortedMap) {
     return removePartial(_col, c, false, false, true);
   } else { // bug #1839634 Problem while using HashSet with Databinding
     // Re-select exactly the elements that will survive the retainAll.
     clearSelection();
     for (Object o : c) {
       int index = indexOf(o);
       addSelectionInterval(index, index);
     }
     final boolean ret = _col.retainAll(c);
     if (ret) {
       fireEvent(ListDataEvent.CONTENTS_CHANGED, -1, -1);
     }
     return ret;
   }
 }