private Collection getFilterGroup(Activity activity) {
  FilteredActivity annotation =
      activity
          .getClass()
          .getAnnotation(com.google.android.apps.wallet.filter.FilteredActivity.class);
  if (annotation == null) {
    throw new IllegalArgumentException(
        activity.getClass().getName().concat(" missing FilteredActivity annotation"));
  }
  String group = annotation.group();
  if (group.equals("NONE")) {
    return new ArrayList();
  }
  if (!activityFilters.containsKey(group)) {
    throw new IllegalArgumentException(
        activity.getClass().getName()
            + " has invalid group attribute: "
            + group
            + " valid groups are: "
            + activityFilters.keySet());
  }
  return activityFilters.get(group);
}
/**
 * Post merge clean up.
 *
 * <ul>
 *   <li>Remove the removed items.
 *   <li>Clear the state of all the items (this allows newly overridden items to lose their
 *       WRITTEN state).
 *   <li>Set the items that are part of the new merge to WRITTEN to allow the next merge to be
 *       incremental.
 * </ul>
 */
private void postMergeCleanUp() {
  ListMultimap<String, I> itemMap = ArrayListMultimap.create();

  // remove all removed items, and copy the rest into the full map while resetting their state.
  for (S dataSet : mDataSets) {
    ListMultimap<String, I> map = dataSet.getDataMap();
    List<String> keys = Lists.newArrayList(map.keySet());
    for (String key : keys) {
      List<I> list = map.get(key);
      for (int i = 0; i < list.size(); ) {
        I item = list.get(i);
        if (item.isRemoved()) {
          list.remove(i);
        } else {
          //noinspection unchecked
          itemMap.put(key, (I) item.resetStatus());
          i++;
        }
      }
    }
  }

  // for the last items (the ones that have been written into the consumer), set their
  // state to WRITTEN
  for (String key : itemMap.keySet()) {
    List<I> itemList = itemMap.get(key);
    itemList.get(itemList.size() - 1).resetStatusToWritten();
  }
}
public void print(
    PdfWriter pdfWriter, GroupingContainer groupingContainer, Document document, Locale locale)
    throws DocumentException {
  if (notPrintOperationAtFirstPage()) {
    document.newPage();
  }
  ListMultimap<String, OrderOperationComponent> titleToOperationComponent =
      groupingContainer.getTitleToOperationComponent();
  for (String title : titleToOperationComponent.keySet()) {
    operationSectionHeader.print(document, title);
    int count = 0;
    List<OrderOperationComponent> components = titleToOperationComponent.get(title);
    for (OrderOperationComponent orderOperationComponent : components) {
      count++;
      operationOrderSection.print(
          pdfWriter,
          groupingContainer,
          orderOperationComponent.getOrder(),
          orderOperationComponent.getOperationComponent(),
          document,
          locale);
      if (count != components.size() && notPrintOperationAtFirstPage()) {
        document.add(Chunk.NEXTPAGE);
      }
    }
  }
}
private void reportMissingDependencies(
    String type,
    ListMultimap<Artifact, DependencyNode> artifactNotFoundMap,
    ReportEntryType reportEntryType,
    TestSetStats testSuite) {
  for (Artifact artifact : sortArtifacts(artifactNotFoundMap.keySet())) {
    List<DependencyNode> roots = sortDependencyNodes(artifactNotFoundMap.get(artifact));
    if (roots.size() == 1) {
      String msg =
          "Missing "
              + artifact
              + " in "
              + roots.get(0).getArtifact()
              + " (path "
              + findPathToDependency(artifact, roots.get(0))
              + ")";
      reportTestCase(type, reportEntryType, msg, null, testSuite);
    } else {
      String msg = "Missing " + artifact + " in " + roots.size() + " artifacts ...";
      StringBuilder dsc = new StringBuilder();
      dsc.append("Missing " + artifact + " in ...");
      dsc.append(LINE_SEPARATOR).append(LINE_SEPARATOR);
      for (DependencyNode root : roots) {
        dsc.append(root.getArtifact() + " (path " + findPathToDependency(artifact, root) + ")");
        dsc.append(LINE_SEPARATOR).append(LINE_SEPARATOR);
      }
      reportTestCase(type, reportEntryType, msg, dsc.toString(), testSuite);
    }
  }
}
@Override public PlanNode visitUnion(UnionNode node, RewriteContext<Set<Symbol>> context) { // Find out which output symbols we need to keep ImmutableListMultimap.Builder<Symbol, Symbol> rewrittenSymbolMappingBuilder = ImmutableListMultimap.builder(); for (Symbol symbol : node.getOutputSymbols()) { if (context.get().contains(symbol)) { rewrittenSymbolMappingBuilder.putAll(symbol, node.getSymbolMapping().get(symbol)); } } ListMultimap<Symbol, Symbol> rewrittenSymbolMapping = rewrittenSymbolMappingBuilder.build(); // Find the corresponding input symbol to the remaining output symbols and prune the subplans ImmutableList.Builder<PlanNode> rewrittenSubPlans = ImmutableList.builder(); for (int i = 0; i < node.getSources().size(); i++) { ImmutableSet.Builder<Symbol> expectedInputSymbols = ImmutableSet.builder(); for (Collection<Symbol> symbols : rewrittenSymbolMapping.asMap().values()) { expectedInputSymbols.add(Iterables.get(symbols, i)); } rewrittenSubPlans.add( context.rewrite(node.getSources().get(i), expectedInputSymbols.build())); } return new UnionNode( node.getId(), rewrittenSubPlans.build(), rewrittenSymbolMapping, ImmutableList.copyOf(rewrittenSymbolMapping.keySet())); }
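The pruning above relies on each output symbol mapping to exactly one input symbol per union branch, so index i into a key's value list selects branch i's column. A minimal, self-contained sketch of that column-indexed access (the symbol names are hypothetical, not from the planner):

import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Iterables;
import com.google.common.collect.ListMultimap;
import java.util.Collection;

public class UnionColumnSketch {
  public static void main(String[] args) {
    // output symbol -> [input symbol of branch 0, input symbol of branch 1]
    ListMultimap<String, String> mapping =
        ImmutableListMultimap.<String, String>builder()
            .putAll("out_a", "left_a", "right_a")
            .putAll("out_b", "left_b", "right_b")
            .build();

    int branch = 1; // the second source of the union
    for (Collection<String> symbols : mapping.asMap().values()) {
      // Iterables.get picks branch i's input symbol, as visitUnion does above.
      System.out.println(Iterables.get(symbols, branch)); // right_a, right_b
    }
  }
}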
@Override public void serialize(final MapGenerator gen) { final ListMultimap<String, MultipartItem> items = LinkedListMultimap.create(); for (MultipartItem item : this.getItems()) { items.put(item.getName(), item); } for (final String name : items.keySet()) { final List<MultipartItem> values = items.get(name); if (values.size() == 1) { gen.map(name); MultipartItemMapper.serialize(gen, values.get(0)); gen.end(); } else { gen.array(name); values.forEach( (item) -> { gen.map(); MultipartItemMapper.serialize(gen, item); gen.end(); }); gen.end(); } } }
/** * Returns a combined map of user and group permissions, with group names prefixed by {@link * AccessControlLists#GROUP_PREFIX}. */ public ListMultimap<String, T> getAllPermissions() { ListMultimap<String, T> tmp = ArrayListMultimap.create(); tmp.putAll(userCache); for (String group : groupCache.keySet()) { tmp.putAll(AccessControlLists.GROUP_PREFIX + group, groupCache.get(group)); } return tmp; }
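A minimal usage sketch of the combine-and-prefix pattern above, with plain strings standing in for the permission type and a literal "@" standing in for AccessControlLists.GROUP_PREFIX (all names here are made up for illustration):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

public class CombinedPermissionsSketch {
  public static void main(String[] args) {
    ListMultimap<String, String> userCache = ArrayListMultimap.create();
    userCache.put("alice", "READ");

    ListMultimap<String, String> groupCache = ArrayListMultimap.create();
    groupCache.put("admins", "WRITE");
    groupCache.put("admins", "ADMIN");

    // Same shape as getAllPermissions(): copy users, then prefix group keys.
    ListMultimap<String, String> all = ArrayListMultimap.create();
    all.putAll(userCache);
    for (String group : groupCache.keySet()) {
      all.putAll("@" + group, groupCache.get(group));
    }
    System.out.println(all); // {alice=[READ], @admins=[WRITE, ADMIN]}
  }
}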
/** Builds a new {@code CellCollection} by removing all the given cell labels. */ public Term removeAll(Set<CellLabel> removeLabels) { Builder builder = builder(); cells .keySet() .stream() .filter(label -> !removeLabels.contains(label)) .forEach(label -> builder.addAll(cells.get(label))); builder.concatenate(collectionVariables); return builder.build(); }
private void printHistogramData(ListMultimap<String, Task> tasks, String category) { out.printf(" '%s': {\n", category); for (String function : tasks.keySet()) { out.printf(" '%s': google.visualization.arrayToDataTable(\n", function); out.print(" [['duration']"); for (Task task : tasks.get(function)) { out.printf(",[%f]", task.duration / 1000000.); } out.println("],\n false),"); } out.println(" },"); }
/** * Returns the number of items. * * @return the number of items. * @see DataMap */ @Override public int size() { // put all the resource keys in a set. Set<String> keys = Sets.newHashSet(); for (S resourceSet : mDataSets) { ListMultimap<String, I> map = resourceSet.getDataMap(); keys.addAll(map.keySet()); } return keys.size(); }
/** * Get a flat list of validation messages * * <p>One message has the form: * * <pre> * #/pointer/here: message here * </pre> * * <p>The list is sorted by pointer. * * @return the list of messages */ public List<String> getMessages() { final Iterable<JsonPointer> paths = Ordering.natural().sortedCopy(msgMap.keySet()); final ImmutableList.Builder<String> builder = ImmutableList.builder(); List<Message> messages; for (final JsonPointer path : paths) { messages = MESSAGE_ORDER.sortedCopy(msgMap.get(path)); for (final Message msg : messages) builder.add(path + ": " + msg); } return builder.build(); }
/** * Retrieve all messages as a JSON object * * <p>The retrieved JSON document is an object where: * * <ul> * <li>keys are string representations of {@link JsonPointer}s, * <li>values are arrays of objects where each individual object is the JSON representation of * one message. * </ul> * * <p>Note: the returned {@link JsonNode} is mutable. * * @see Message#toJsonNode() * @return a JSON object with all validation messages */ public JsonNode asJsonObject() { final ObjectNode ret = JsonNodeFactory.instance.objectNode(); ArrayNode node; List<Message> messages; for (final JsonPointer ptr : msgMap.keySet()) { node = JsonNodeFactory.instance.arrayNode(); messages = MESSAGE_ORDER.sortedCopy(msgMap.get(ptr)); for (final Message message : messages) node.add(message.toJsonNode()); ret.put(ptr.toString(), node); } return ret; }
private List<BrokenRule> getOrderedRuleList(ListMultimap<String, String> suppressionsByRule) {
  List<BrokenRule> brokenRuleList = new ArrayList<>();
  for (String rule : suppressionsByRule.keySet()) {
    BrokenRule br = new BrokenRule();
    br.name = rule;
    br.frequency = suppressionsByRule.get(rule).size();
    brokenRuleList.add(br);
  }
  Collections.sort(brokenRuleList, new OriderByBroken());
  return brokenRuleList;
}
private <N extends InternalTitanVertex> void persist( ListMultimap<N, InternalRelation> mutatedEdges, Map<TitanType, TypeSignature> signatures, InternalTitanTransaction tx, StoreMutator mutator) throws StorageException { assert mutatedEdges != null && !mutatedEdges.isEmpty(); Collection<N> vertices = mutatedEdges.keySet(); // if (sortNodes) { // List<N> sortedvertices = new ArrayList<N>(vertices); // Collections.sort(sortedvertices, new Comparator<N>(){ // // @Override // public int compare(N o1, N o2) { // assert o1.getID()!=o2.getID(); // if (o1.getID()<o2.getID()) return -1; // else return 1; // } // // }); // vertices=sortedvertices; // } for (N node : vertices) { List<InternalRelation> edges = mutatedEdges.get(node); List<Entry> additions = new ArrayList<Entry>(edges.size()); List<ByteBuffer> deletions = new ArrayList<ByteBuffer>(Math.max(10, edges.size() / 10)); List<TitanProperty> properties = new ArrayList<TitanProperty>(); for (InternalRelation edge : edges) { if (edge.isRemoved()) { if (edge.isProperty()) { deleteIndexEntry((TitanProperty) edge, mutator); } deletions.add(getEntry(tx, edge, node, signatures, true).getColumn()); } else { assert edge.isNew(); if (edge.isProperty()) properties.add((TitanProperty) edge); additions.add(getEntry(tx, edge, node, signatures)); } } mutator.mutateEdges(IDHandler.getKey(node.getID()), additions, deletions); // Persist property index for retrieval for (TitanProperty prop : properties) { addIndexEntry(prop, mutator); } } }
private void readInjectedDependencies() { File injectedDepFile = new File(getConfigDir(), "injectedDependencies.json"); if (!injectedDepFile.exists()) { FMLLog.getLogger() .log( Level.DEBUG, "File {} not found. No dependencies injected", injectedDepFile.getAbsolutePath()); return; } JsonParser parser = new JsonParser(); JsonElement injectedDeps; try { injectedDeps = parser.parse(new FileReader(injectedDepFile)); for (JsonElement el : injectedDeps.getAsJsonArray()) { JsonObject jo = el.getAsJsonObject(); String modId = jo.get("modId").getAsString(); JsonArray deps = jo.get("deps").getAsJsonArray(); for (JsonElement dep : deps) { JsonObject depObj = dep.getAsJsonObject(); String type = depObj.get("type").getAsString(); if (type.equals("before")) { injectedBefore.put( modId, VersionParser.parseVersionReference(depObj.get("target").getAsString())); } else if (type.equals("after")) { injectedAfter.put( modId, VersionParser.parseVersionReference(depObj.get("target").getAsString())); } else { FMLLog.getLogger().log(Level.ERROR, "Invalid dependency type {}", type); throw new RuntimeException("Unable to parse type"); } } } } catch (Exception e) { FMLLog.getLogger().log(Level.ERROR, "Unable to parse {} - skipping", injectedDepFile); FMLLog.getLogger().throwing(Level.ERROR, e); return; } FMLLog.getLogger() .log( Level.DEBUG, "Loaded {} injected dependencies on modIds: {}", injectedBefore.size(), injectedBefore.keySet()); }
/** * Sets the post blob load state to TOUCHED. * * <p>After a load from the blob file, all items have their state set to nothing. If the load mode * is not set to incrementalState then we want the items that are in the current merge result to * have their state be TOUCHED. * * <p>This will allow the first use of {@link #mergeData(MergeConsumer, boolean)} to add these to * the consumer as if they were new items. * * @see #loadFromBlob(java.io.File, boolean) * @see DataItem#isTouched() */ private void setPostBlobLoadStateToTouched() { ListMultimap<String, I> itemMap = ArrayListMultimap.create(); // put all the sets into list per keys. The order is important as the lower sets are // overridden by the higher sets. for (S dataSet : mDataSets) { ListMultimap<String, I> map = dataSet.getDataMap(); for (Map.Entry<String, Collection<I>> entry : map.asMap().entrySet()) { itemMap.putAll(entry.getKey(), entry.getValue()); } } // the items that represent the current state is the last item in the list for each key. for (String key : itemMap.keySet()) { List<I> itemList = itemMap.get(key); itemList.get(itemList.size() - 1).resetStatusToTouched(); } }
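Both this method and postMergeCleanUp depend on ArrayListMultimap keeping each key's values in insertion order, so the last element of a key's list is the item from the highest-priority data set. A minimal sketch of that invariant (the keys and values are hypothetical):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import java.util.List;

public class OverrideOrderSketch {
  public static void main(String[] args) {
    ListMultimap<String, String> itemMap = ArrayListMultimap.create();
    itemMap.put("app_name", "from-main");    // lower-priority set, added first
    itemMap.put("app_name", "from-overlay"); // higher-priority set, added last

    // The "current state" is the last item in the list for each key.
    List<String> items = itemMap.get("app_name");
    System.out.println(items.get(items.size() - 1)); // from-overlay
  }
}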
/** * Return the list of validation messages as a JSON array * * <p>This method makes its best to order validation messages correctly. * * <p>Each message in the resulting array is a JSON object, with the contents of the {@link * Message} and with an added member named {@code path}, which contains the path into the instance * where the error has occurred (as a {@link JsonPointer}). * * @see Message#toJsonNode() * @return a JSON array with all validation messages */ public JsonNode asJsonArray() { final ArrayNode ret = JsonNodeFactory.instance.arrayNode(); ObjectNode node; final Iterable<JsonPointer> paths = Ordering.natural().sortedCopy(msgMap.keySet()); List<Message> messages; for (final JsonPointer ptr : paths) { messages = MESSAGE_ORDER.sortedCopy(msgMap.get(ptr)); for (final Message msg : messages) { node = JsonNodeFactory.instance.objectNode().put("path", ptr.toString()); // I hate to do that... node.putAll((ObjectNode) msg.toJsonNode()); ret.add(node); } } return ret; }
/** @see org.eclipse.jface.viewers.ITreeContentProvider#getElements(java.lang.Object) */ @Override public Object[] getElements(Object inputElement) { if (inputElement == null) { nameSpaces = null; return new Object[0]; } @SuppressWarnings("unchecked") Collection<? extends TypeDefinition> types = (Collection<? extends TypeDefinition>) inputElement; nameSpaces = ArrayListMultimap.create(); for (TypeDefinition type : types) { // only show types with mappable flag if (!type.getConstraint(MappableFlag.class).isEnabled()) continue; String ns = type.getName().getNamespaceURI(); if (XMLConstants.NULL_NS_URI.equals(ns)) ns = "(no namespace)"; nameSpaces.put(ns, type); } return nameSpaces.keySet().toArray(); }
private List<VariableAccess> getUnsequencedAccesses() { if (!hasModification) { return Collections.emptyList(); } ListMultimap<IVariableBinding, VariableAccess> accessesByVar = ArrayListMultimap.create(); for (VariableAccess access : orderedAccesses) { accessesByVar.put(access.variable, access); } Set<VariableAccess> unsequencedAccesses = Sets.newHashSet(); for (IVariableBinding var : accessesByVar.keySet()) { findUnsequenced(accessesByVar.get(var), unsequencedAccesses); } List<VariableAccess> orderedUnsequencedAccesses = Lists.newArrayListWithCapacity(unsequencedAccesses.size()); for (VariableAccess access : orderedAccesses) { if (unsequencedAccesses.contains(access)) { orderedUnsequencedAccesses.add(access); } } return orderedUnsequencedAccesses; }
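The build-up of accessesByVar is the standard index-by-key pattern; where Guava's Multimaps.index fits, the loop collapses to one call. A generic, self-contained sketch (the example data is hypothetical):

import com.google.common.collect.ImmutableListMultimap;
import com.google.common.collect.Multimaps;
import java.util.Arrays;
import java.util.List;

public class IndexByKeySketch {
  public static void main(String[] args) {
    List<String> accesses = Arrays.asList("alpha", "avocado", "beta");
    // Group each element under a derived key, preserving encounter order,
    // exactly like the manual put() loop above.
    ImmutableListMultimap<Character, String> byFirstLetter =
        Multimaps.index(accesses, s -> s.charAt(0));
    System.out.println(byFirstLetter); // {a=[alpha, avocado], b=[beta]}
  }
}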
private List<BlameInfo> blame(ObjectId id, String path, Repository repository, RevWalk revWalk) throws IOException { ListMultimap<BlameInfo, RangeInfo> ranges = ArrayListMultimap.create(); List<BlameInfo> result = new ArrayList<>(); if (blameCache.findLastCommit(repository, id, path) == null) { return result; } List<Region> blameRegions = blameCache.get(repository, id, path); int from = 1; for (Region region : blameRegions) { RevCommit commit = revWalk.parseCommit(region.getSourceCommit()); BlameInfo blameInfo = toBlameInfo(commit, region.getSourceAuthor()); ranges.put(blameInfo, new RangeInfo(from, from + region.getCount() - 1)); from += region.getCount(); } for (BlameInfo key : ranges.keySet()) { key.ranges = ranges.get(key); result.add(key); } return result; }
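Using BlameInfo itself as the multimap key only groups ranges correctly because equal BlameInfo instances compare equal; any value type used this way needs consistent equals and hashCode. A minimal sketch with a hypothetical key class:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import java.util.Objects;

public class ValueKeySketch {
  static final class Author {
    final String name;
    Author(String name) { this.name = name; }
    @Override public boolean equals(Object o) {
      return o instanceof Author && ((Author) o).name.equals(name);
    }
    @Override public int hashCode() { return Objects.hash(name); }
  }

  public static void main(String[] args) {
    ListMultimap<Author, String> ranges = ArrayListMultimap.create();
    // Two distinct instances that are equal end up under one key.
    ranges.put(new Author("alice"), "1-10");
    ranges.put(new Author("alice"), "20-25");
    System.out.println(ranges.keySet().size());          // 1
    System.out.println(ranges.get(new Author("alice"))); // [1-10, 20-25]
  }
}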
private static int numOfMultiplicityCellLabels( ListMultimap<CellLabel, Cell> cells, Definition definition) { int count = 0; for (CellLabel cellLabel : cells.keySet()) { if (definition.cellMultiplicity(cellLabel) == ConfigurationInfo.Multiplicity.STAR) { count++; } else { if (cells.get(cellLabel).size() != 1) { throw KEMException.criticalError( "Cell label " + cellLabel + " does not have " + "multiplicity='*', but multiple cells found: " + cells.get(cellLabel) + "\nExamine the last rule applied to determine the source of the error."); } } } assert count <= 1 : "Multiple types of starred cells in one cell collection not supported at present"; return count; }
@Override
public void save(
    final Collection<InternalRelation> addedRelations,
    final Collection<InternalRelation> deletedRelations,
    final InternalTitanTransaction tx)
    throws StorageException {
  // Setup
  log.debug(
      "Saving transaction. Added {}, removed {}", addedRelations.size(), deletedRelations.size());
  final Map<TitanType, TypeSignature> signatures = new HashMap<TitanType, TypeSignature>();
  final TransactionHandle txh = tx.getTxHandle();
  final StoreMutator mutator = getStoreMutator(txh);
  final boolean acquireLocks = tx.getTxConfiguration().hasAcquireLocks();

  // 1. Assign TitanVertex IDs
  assignIDs(addedRelations, tx);

  for (int saveAttempt = 0; saveAttempt < maxWriteRetryAttempts; saveAttempt++) {
    // while (true) { //Indefinite loop, broken if no exception occurs, otherwise retried
    // or failed immediately
    try {
      // 2. Collect deleted edges
      ListMultimap<InternalTitanVertex, InternalRelation> mutations = ArrayListMultimap.create();
      if (deletedRelations != null && !deletedRelations.isEmpty()) {
        for (InternalRelation del : deletedRelations) {
          assert del.isRemoved();
          for (int pos = 0; pos < del.getArity(); pos++) {
            InternalTitanVertex node = del.getVertex(pos);
            if (pos == 0 || !del.isUnidirected()) {
              mutations.put(node, del);
            }
            if (pos == 0
                && acquireLocks
                && del.getType().isFunctional()
                && ((InternalTitanType) del.getType()).isFunctionalLocking()) {
              Entry entry = getEntry(tx, del, node, signatures);
              mutator.acquireEdgeLock(
                  IDHandler.getKey(node.getID()), entry.getColumn(), entry.getValue());
            }
          }
          if (acquireLocks && del.isProperty()) {
            lockKeyedProperty((TitanProperty) del, mutator);
          }
        }
      }

      ListMultimap<InternalTitanType, InternalRelation> simpleEdgeTypes = null;
      ListMultimap<InternalTitanType, InternalRelation> otherEdgeTypes = null;

      // 3. Sort Added Edges
      for (InternalRelation edge : addedRelations) {
        if (edge.isRemoved()) continue;
        assert edge.isNew();

        TitanType et = edge.getType();
        // Give special treatment to edge type definitions
        if (SystemTypeManager.prepersistedSystemTypes.contains(et)) {
          assert edge.getVertex(0) instanceof InternalTitanType;
          InternalTitanType node = (InternalTitanType) edge.getVertex(0);
          assert node.hasID();
          if (node.isSimple()) {
            if (simpleEdgeTypes == null) simpleEdgeTypes = ArrayListMultimap.create();
            simpleEdgeTypes.put(node, edge);
          } else {
            if (otherEdgeTypes == null) otherEdgeTypes = ArrayListMultimap.create();
            otherEdgeTypes.put(node, edge);
          }
        } else { // STANDARD TitanRelation
          assert (edge.getArity() == 1 && edge.isProperty())
              || (edge.getArity() == 2 && edge.isEdge());
          for (int pos = 0; pos < edge.getArity(); pos++) {
            InternalTitanVertex node = edge.getVertex(pos);
            assert node.hasID();
            if (pos == 0 || !edge.isUnidirected()) {
              mutations.put(node, edge);
            }
            if (pos == 0
                && acquireLocks
                && edge.getType().isFunctional()
                && !node.isNew()
                && ((InternalTitanType) edge.getType()).isFunctionalLocking()) {
              Entry entry = getEntry(tx, edge, node, signatures, true);
              mutator.acquireEdgeLock(IDHandler.getKey(node.getID()), entry.getColumn(), null);
            }
          }
        }
        if (acquireLocks && edge.isProperty()) {
          lockKeyedProperty((TitanProperty) edge, mutator);
        }
      }

      // 4. Persist
      if (simpleEdgeTypes != null) persist(simpleEdgeTypes, signatures, tx, mutator);
      if (otherEdgeTypes != null) persist(otherEdgeTypes, signatures, tx, mutator);
      mutator.flush();

      // Commit saved EdgeTypes to TypeManager
      if (simpleEdgeTypes != null) commitEdgeTypes(simpleEdgeTypes.keySet());
      if (otherEdgeTypes != null) commitEdgeTypes(otherEdgeTypes.keySet());
      if (!mutations.isEmpty()) persist(mutations, signatures, tx, mutator);
      mutator.flush();

      // Successfully completed - return to break out of loop
      break;
    } catch (Throwable e) {
      if (e instanceof TemporaryStorageException) {
        if (saveAttempt < maxWriteRetryAttempts - 1) temporaryStorageException(e);
        else
          throw new PermanentStorageException(
              "Tried committing "
                  + maxWriteRetryAttempts
                  + " times on temporary exception without success",
              e);
      } else if (e instanceof StorageException) {
        throw (StorageException) e;
      } else {
        throw new PermanentStorageException(
            "Unidentified exception occurred during persistence", e);
      }
    }
  }
}
public SuppressionScrap(String fileName) {
  File fXmlFile = new File(fileName);
  DocumentBuilderFactory dbFactory = DocumentBuilderFactory.newInstance();
  DocumentBuilder dBuilder = null;
  try {
    dBuilder = dbFactory.newDocumentBuilder();
    Document doc = dBuilder.parse(fXmlFile);
    doc.getDocumentElement().normalize();
    NodeList nList = doc.getElementsByTagName("suppress");
    for (int temp = 0; temp < nList.getLength(); temp++) {
      Node nNode = nList.item(temp);
      if (nNode.getNodeType() == Node.ELEMENT_NODE) {
        Element eElement = (Element) nNode;
        String rule = eElement.getAttribute("checks");
        String file = eElement.getAttribute("files");
        if (file.indexOf('[') == -1) {
          if (rule.equals("")) {
            classSuppressionsByWildRule.put(rule, file);
          } else {
            classSuppressionsByRule.put(rule, file);
            rulesByClass.put(file, rule);
          }
        } else {
          if (rule.equals("")) {
            pakageSuppressionsByWildRule.put(rule, file);
          } else {
            pakageSuppressionsByRule.put(rule, file);
            rulesByPakage.put(file, rule);
          }
        }
        allSuppressionsByRule.put(rule, file);
      }
    }
    Date date = new Date();
    System.out.println("===" + date + " Suppression stats ===");
    System.out.println();
    System.out.println("Total suppressions = " + allSuppressionsByRule.size());
    System.out.println("Total unique suppressions = " + allSuppressionsByRule.keySet().size());
    System.out.println(
        "Total packages with wildcard suppression = " + pakageSuppressionsByWildRule.size());
    System.out.println(
        "Total packages with specific suppression = " + rulesByPakage.keySet().size());
    System.out.println(
        "Total classes with wildcard suppression = " + classSuppressionsByWildRule.size());
    System.out.println(
        "Total classes with specific suppression = " + rulesByClass.keySet().size());
    System.out.println();
    System.out.println("---Suppressed packages ordered by suppression instance count---");
    printMostBrokenRule(rulesByPakage);
    System.out.println();
    System.out.println("---Suppressed classes ordered by suppression instance count---");
    printMostBrokenRule(rulesByClass);
    System.out.println();
    System.out.println("---Suppressions ordered by most broken, grouped by package---");
    printMostBrokenRule(pakageSuppressionsByRule);
    System.out.println();
    System.out.println("---Suppressions ordered by most broken, grouped by class---");
    printMostBrokenRule(classSuppressionsByRule);
    System.out.println();
  } catch (ParserConfigurationException e) {
    e.printStackTrace();
  } catch (SAXException e) {
    e.printStackTrace();
  } catch (IOException e) {
    e.printStackTrace();
  }
}
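The stats above lean on the difference between Multimap.size(), which counts key-value pairs, and keySet().size(), which counts distinct keys. A minimal sketch (the rule and file names are made up):

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;

public class SizeVsKeysSketch {
  public static void main(String[] args) {
    ListMultimap<String, String> suppressions = ArrayListMultimap.create();
    suppressions.put("RuleA", "Foo.java");
    suppressions.put("RuleA", "Bar.java");
    suppressions.put("RuleB", "Foo.java");

    System.out.println(suppressions.size());          // 3 total suppressions
    System.out.println(suppressions.keySet().size()); // 2 unique rules
  }
}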
private Map<FieldName, ? extends ClassificationMap<?>> evaluateRuleSet( ModelManagerEvaluationContext context) { RuleSetModel ruleSetModel = getModel(); RuleSet ruleSet = ruleSetModel.getRuleSet(); List<RuleSelectionMethod> ruleSelectionMethods = ruleSet.getRuleSelectionMethods(); RuleSelectionMethod ruleSelectionMethod; // "If more than one method is included, the first method is used as the default method for // scoring" if (ruleSelectionMethods.size() > 0) { ruleSelectionMethod = ruleSelectionMethods.get(0); } else { throw new InvalidFeatureException(ruleSet); } // Both the ordering of keys and values is significant ListMultimap<String, SimpleRule> firedRules = LinkedListMultimap.create(); List<Rule> rules = ruleSet.getRules(); for (Rule rule : rules) { collectFiredRules(firedRules, rule, context); } RuleClassificationMap result = new RuleClassificationMap(); RuleSelectionMethod.Criterion criterion = ruleSelectionMethod.getCriterion(); Set<String> keys = firedRules.keySet(); for (String key : keys) { List<SimpleRule> keyRules = firedRules.get(key); switch (criterion) { case FIRST_HIT: { SimpleRule winner = keyRules.get(0); // The first value of the first key if (result.getEntity() == null) { result.setEntity(winner); } result.put(key, winner.getConfidence()); } break; case WEIGHTED_SUM: { SimpleRule winner = null; double totalWeight = 0; for (SimpleRule keyRule : keyRules) { if (winner == null || (winner.getWeight() < keyRule.getWeight())) { winner = keyRule; } totalWeight += keyRule.getWeight(); } result.put(winner, key, totalWeight / firedRules.size()); } break; case WEIGHTED_MAX: { SimpleRule winner = null; for (SimpleRule keyRule : keyRules) { if (winner == null || (winner.getWeight() < keyRule.getWeight())) { winner = keyRule; } } result.put(winner, key, winner.getConfidence()); } break; default: throw new UnsupportedFeatureException(ruleSelectionMethod, criterion); } } return TargetUtil.evaluateClassification(result, context); }
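The comment "Both the ordering of keys and values is significant" is why firedRules is a LinkedListMultimap: its keySet() iterates in first-insertion order, whereas ArrayListMultimap's keySet() follows hash order. A minimal sketch of the difference:

import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.LinkedListMultimap;

public class KeyOrderSketch {
  public static void main(String[] args) {
    LinkedListMultimap<String, Integer> linked = LinkedListMultimap.create();
    linked.put("zebra", 1);
    linked.put("apple", 2);
    System.out.println(linked.keySet()); // [zebra, apple] -- insertion order

    ArrayListMultimap<String, Integer> hashed = ArrayListMultimap.create();
    hashed.put("zebra", 1);
    hashed.put("apple", 2);
    System.out.println(hashed.keySet()); // iteration order not guaranteed
  }
}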
@Override public Set<K> keySet() { return backingMap.keySet(); }
/**
 * Merges the data into a given consumer.
 *
 * @param consumer the consumer of the merge.
 * @param doCleanUp clean up the state to be able to do further incremental merges. If this is a
 *     one-shot merge, this can be false to improve performance.
 * @throws MergingException such as a DuplicateDataException or a MergeConsumer.ConsumerException
 *     if something goes wrong
 */
public void mergeData(@NonNull MergeConsumer<I> consumer, boolean doCleanUp)
    throws MergingException {
  consumer.start(mFactory);

  try {
    // get all the item keys.
    Set<String> dataItemKeys = Sets.newHashSet();
    for (S dataSet : mDataSets) {
      // quick check on duplicates in the resource set.
      dataSet.checkItems();
      ListMultimap<String, I> map = dataSet.getDataMap();
      dataItemKeys.addAll(map.keySet());
    }

    // loop on all the data items.
    for (String dataItemKey : dataItemKeys) {
      if (requiresMerge(dataItemKey)) {
        // get all the available items, from the lower priority to the higher priority.
        List<I> items = Lists.newArrayListWithExpectedSize(mDataSets.size());
        for (S dataSet : mDataSets) {
          // look for the resource key in the set
          ListMultimap<String, I> itemMap = dataSet.getDataMap();
          List<I> setItems = itemMap.get(dataItemKey);
          items.addAll(setItems);
        }
        mergeItems(dataItemKey, items, consumer);
        continue;
      }

      // for each item, look in the data sets, starting from the end of the list.
      I previouslyWritten = null;
      I toWrite = null;

      /*
       * We are looking for what to write/delete: the last non-deleted item, and the
       * previously written one.
       */
      boolean foundIgnoredItem = false;

      setLoop:
      for (int i = mDataSets.size() - 1; i >= 0; i--) {
        S dataSet = mDataSets.get(i);

        // look for the resource key in the set
        ListMultimap<String, I> itemMap = dataSet.getDataMap();
        List<I> items = itemMap.get(dataItemKey);
        if (items.isEmpty()) {
          continue;
        }

        // The list can contain at most two items: one touched and one deleted.
        // More than one of either would mean duplicates, which isn't possible.
        for (int ii = items.size() - 1; ii >= 0; ii--) {
          I item = items.get(ii);
          if (consumer.ignoreItemInMerge(item)) {
            foundIgnoredItem = true;
            continue;
          }
          if (item.isWritten()) {
            assert previouslyWritten == null;
            previouslyWritten = item;
          }
          if (toWrite == null && !item.isRemoved()) {
            toWrite = item;
          }
          if (toWrite != null && previouslyWritten != null) {
            break setLoop;
          }
        }
      }

      // done searching, we should at least have something, unless we only
      // found items that are not meant to be written (attr inside declare styleable)
      assert foundIgnoredItem || previouslyWritten != null || toWrite != null;

      //noinspection ConstantConditions
      if (previouslyWritten == null && toWrite == null) {
        continue;
      }

      // now we need to handle the type of each (single res file, multi res file), whether
      // they are the same object or not, and whether the previously written object was deleted.
      if (toWrite == null) {
        // nothing to write? delete only then.
        assert previouslyWritten.isRemoved();
        consumer.removeItem(previouslyWritten, null /*replacedBy*/);
      } else if (previouslyWritten == null || previouslyWritten == toWrite) {
        // easy one: new or updated res
        consumer.addItem(toWrite);
      } else {
        // replacement of a resource by another.
        // force write the new value
        toWrite.setTouched();
        consumer.addItem(toWrite);
        // and remove the old one
        consumer.removeItem(previouslyWritten, toWrite);
      }
    }
  } finally {
    consumer.end();
  }

  if (doCleanUp) {
    // Reset all states. We can't just reset the toWrite and previouslyWritten objects
    // since overlaid items might have been touched as well.
    // Should also clean (remove) objects that are removed.
    postMergeCleanUp();
  }
}
@Override protected void postWriteAction() throws ConsumerException { // now write the values files. for (String key : mValuesResMap.keySet()) { // the key is the qualifier. // check if we have to write the file due to deleted values. // also remove it from that list anyway (to detect empty qualifiers later). boolean mustWriteFile = mQualifierWithDeletedValues.remove(key); // get the list of items to write List<ResourceItem> items = mValuesResMap.get(key); // now check if we really have to write it if (!mustWriteFile) { for (ResourceItem item : items) { if (item.isTouched()) { mustWriteFile = true; break; } } } if (mustWriteFile) { String folderName = key.isEmpty() ? ResourceFolderType.VALUES.getName() : ResourceFolderType.VALUES.getName() + RES_QUALIFIER_SEP + key; try { File valuesFolder = new File(getRootFolder(), folderName); createDir(valuesFolder); File outFile = new File(valuesFolder, FN_VALUES_XML); DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance(); factory.setNamespaceAware(true); factory.setValidating(false); factory.setIgnoringComments(true); DocumentBuilder builder; builder = factory.newDocumentBuilder(); Document document = builder.newDocument(); Node rootNode = document.createElement(TAG_RESOURCES); document.appendChild(rootNode); Collections.sort(items); ResourceFile currentFile = null; for (ResourceItem item : items) { ResourceFile source = item.getSource(); if (source != currentFile) { currentFile = source; rootNode.appendChild(document.createTextNode("\n")); File file = source.getFile(); rootNode.appendChild(document.createComment(createPathComment(file))); rootNode.appendChild(document.createTextNode("\n")); } Node adoptedNode = NodeUtils.adoptNode(document, item.getValue()); rootNode.appendChild(adoptedNode); } String content; try { content = XmlPrettyPrinter.prettyPrint(document, true); } catch (Throwable t) { content = XmlUtils.toXml(document, false); } Files.write(content, outFile, Charsets.UTF_8); } catch (Throwable t) { throw new ConsumerException(t); } } } // now remove empty values files. for (String key : mQualifierWithDeletedValues) { String folderName = key != null && !key.isEmpty() ? ResourceFolderType.VALUES.getName() + RES_QUALIFIER_SEP + key : ResourceFolderType.VALUES.getName(); removeOutFile(folderName, FN_VALUES_XML); } }
public Set<CellLabel> labelSet() { return cells.keySet(); }
public static void main(String[] args) throws ParseException, IOException { Options options = createOptions(); CommandLineParser parser = new GnuParser(); CommandLine cmd = parser.parse(options, args); if (cmd.hasOption("?")) { new HelpFormatter().printHelp("java " + ShowMoves.class.getName(), options); System.exit(1); } File rootFile = new File(cmd.getOptionValue("root")); File movesFile = new File(cmd.getOptionValue("moves")); Predicate<ClassName> packageFilter = new PackagePredicate(cmd.getOptionValue("package", "")); String groupPrefix = cmd.getOptionValue("group", ""); File refsFile = cmd.hasOption("refs") ? new File(cmd.getOptionValue("refs")) : null; // scan all the pom.xml files Modules modules = new Modules(); modules.scan(rootFile, groupPrefix); // load the moves and refs files ClassLocations moves = ClassLocations.parseFile(movesFile, groupPrefix); ClassLocations refs = (refsFile != null) ? ClassLocations.parseFile(refsFile, groupPrefix) : null; // scan the compiled classes of all the maven targets ClassScanner classScanner = new ClassScanner(packageFilter); classScanner.scan(modules.getAllModules()); ClassLocations locations = classScanner.getLocations(); ClassDependencies dependencies = classScanner.getDependencies(); // apply the moves file locations.moveAll(moves); // apply the refs file, if one was specified if (refs != null) { for (ModuleName moduleName : refs.getAllModules()) { ClassName refsName = new ClassName(moduleName + ":" + refsFile); locations.add(refsName, moduleName); dependencies.add(refsName, refs.getClasses(moduleName)); } } // find modules that reference classes they don't have access to Map<ModuleName, ListMultimap<ClassName, ClassName>> brokenMap = Maps.newHashMap(); for (Map.Entry<ClassName, ModuleName> entry : locations.getLocations()) { ClassName className = entry.getKey(); ModuleName moduleName = entry.getValue(); Set<ClassName> referencedClasses = dependencies.getReferencedClasses(className); ListMultimap<ClassName, ClassName> moduleBrokenMap = null; for (ClassName referencedClass : referencedClasses) { ModuleName referencedModule = locations.getModule(referencedClass); if (referencedModule != null && !modules.isDependentOf(moduleName, referencedModule)) { if (moduleBrokenMap == null) { moduleBrokenMap = brokenMap.get(moduleName); if (moduleBrokenMap == null) { brokenMap.put(moduleName, moduleBrokenMap = ArrayListMultimap.create()); } } moduleBrokenMap.put(className, referencedClass); } } } // report broken dependencies System.out.println(); for (ModuleName moduleName : Utils.sorted(brokenMap.keySet())) { ListMultimap<ClassName, ClassName> missingMap = brokenMap.get(moduleName); System.out.println(); System.out.println(moduleName.toString(groupPrefix)); for (ClassName className : Utils.sorted(missingMap.keySet())) { System.out.println(" " + className); for (ClassName referencedClass : Utils.sorted(missingMap.get(className))) { ModuleName referencedModule = locations.getModule(referencedClass); System.out.println( " " + referencedClass + " (" + referencedModule.toString(groupPrefix) + ")"); } } } }
private void integrateIntoHistory(ChangeSet cs, IdentifiedUser caller) throws IntegrationException, NoSuchChangeException, ResourceConflictException { logDebug("Beginning merge attempt on {}", cs); Map<Branch.NameKey, ListMultimap<SubmitType, ChangeData>> toSubmit = new HashMap<>(); logDebug("Perform the merges"); try { Multimap<Project.NameKey, Branch.NameKey> br = cs.branchesByProject(); Multimap<Branch.NameKey, ChangeData> cbb = cs.changesByBranch(); for (Project.NameKey project : br.keySet()) { openRepository(project); for (Branch.NameKey branch : br.get(project)) { setDestProject(branch); ListMultimap<SubmitType, ChangeData> submitting = validateChangeList(cbb.get(branch)); toSubmit.put(branch, submitting); Set<SubmitType> submitTypes = new HashSet<>(submitting.keySet()); for (SubmitType submitType : submitTypes) { SubmitStrategy strategy = createStrategy(branch, submitType, getBranchTip(branch), caller); MergeTip mergeTip = preMerge(strategy, submitting.get(submitType), getBranchTip(branch)); mergeTips.put(branch, mergeTip); updateChangeStatus(submitting.get(submitType), branch, true, caller); } inserter.flush(); } closeRepository(); } logDebug("Write out the new branch tips"); SubmoduleOp subOp = subOpProvider.get(); for (Project.NameKey project : br.keySet()) { openRepository(project); for (Branch.NameKey branch : br.get(project)) { RefUpdate update = updateBranch(branch, caller); pendingRefUpdates.remove(branch); setDestProject(branch); ListMultimap<SubmitType, ChangeData> submitting = toSubmit.get(branch); for (SubmitType submitType : submitting.keySet()) { updateChangeStatus(submitting.get(submitType), branch, false, caller); updateSubmoduleSubscriptions(subOp, branch, getBranchTip(branch)); } if (update != null) { fireRefUpdated(branch, update); } } closeRepository(); } updateSuperProjects(subOp, br.values()); checkState( pendingRefUpdates.isEmpty(), "programmer error: " + "pending ref update list not emptied"); } catch (NoSuchProjectException noProject) { logWarn("Project " + noProject.project() + " no longer exists, " + "abandoning open changes"); abandonAllOpenChanges(noProject.project()); } catch (OrmException e) { throw new IntegrationException("Cannot query the database", e); } catch (IOException e) { throw new IntegrationException("Cannot query the database", e); } finally { closeRepository(); } }