@Override public String toString() { List<ITextSegment> list = toTokenAndGapList(); if (list.isEmpty()) return "(empty)"; Multimap<IHiddenRegion, IEObjectRegion> hiddens = LinkedListMultimap.create(); List<String> errors = Lists.newArrayList(); ITextRegionAccess access = list.get(0).getTextRegionAccess(); TreeIterator<EObject> all = EcoreUtil2.eAll(access.regionForRootEObject().getSemanticElement()); while (all.hasNext()) { EObject element = all.next(); IEObjectRegion obj = access.regionForEObject(element); if (obj == null) continue; IHiddenRegion previous = obj.getPreviousHiddenRegion(); IHiddenRegion next = obj.getNextHiddenRegion(); if (previous == null) errors.add("ERROR: " + EmfFormatter.objPath(element) + " has no leading HiddenRegion."); else hiddens.put(previous, obj); if (previous != next) { if (next == null) errors.add("ERROR: " + EmfFormatter.objPath(element) + " has no trailing HiddenRegion."); else hiddens.put(next, obj); } } TextRegionListToString result = new TextRegionListToString(); if (!hideColumnExplanation) { result.add("Columns: 1:offset 2:length 3:kind 4: text 5:grammarElement", false); result.add("Kind: H=IHiddenRegion S=ISemanticRegion B/E=IEObjectRegion", false); result.add("", false); } for (String error : errors) result.add(error, false); int indentation = 0; for (ITextSegment region : list) { List<IEObjectRegion> previous = Lists.newArrayList(); List<IEObjectRegion> next = Lists.newArrayList(); List<String> middle = Lists.newArrayList(toString(region)); if (region instanceof IHiddenRegion) { Collection<IEObjectRegion> found = hiddens.get((IHiddenRegion) region); for (IEObjectRegion obj : found) { boolean p = obj.getNextHiddenRegion().equals(region); boolean n = obj.getPreviousHiddenRegion().equals(region); if (p && n) middle.add(EMPTY_TITLE + "Semantic " + toString(obj)); else if (p) previous.add(obj); else if (n) next.add(obj); } Collections.sort(previous, AstRegionComparator.CHILDREN_FIRST); Collections.sort(next, AstRegionComparator.CONTAINER_FIRST); } for (IEObjectRegion obj : previous) { indentation--; result.add(indent(indentation) + EOBJECT_END_PADDED + toString(obj)); } String indent = indent(indentation); result.add(region, indent + Joiner.on("\n").join(middle).replace("\n", "\n" + indent)); for (IEObjectRegion obj : next) { result.add(indent(indentation) + EOBJECT_BEGIN_PADDED + toString(obj)); indentation++; } } return result.toString(); }
@Override protected void getAdditionalProperties(Multimap<String, String> additionalProperties) { super.getAdditionalProperties(additionalProperties); additionalProperties.put("o", organizationName); additionalProperties.put("dc", getCn()); }
@Subscribe public void buildModList(FMLLoadEvent event) { this.modList = loader.getIndexedModList(); Builder<String, EventBus> eventBus = ImmutableMap.builder(); for (ModContainer mod : loader.getModList()) { EventBus bus = new EventBus(mod.getModId()); boolean isActive = mod.registerBus(bus, this); if (isActive) { Level level = Logger.getLogger(mod.getModId()).getLevel(); FMLLog.log( mod.getModId(), Level.FINE, "Mod Logging channel %s configured at %s level.", mod.getModId(), level == null ? "default" : level); FMLLog.log(mod.getModId(), Level.INFO, "Activating mod %s", mod.getModId()); activeModList.add(mod); modStates.put(mod.getModId(), ModState.UNLOADED); eventBus.put(mod.getModId(), bus); } else { FMLLog.log( mod.getModId(), Level.WARNING, "Mod %s has been disabled through configuration", mod.getModId()); modStates.put(mod.getModId(), ModState.UNLOADED); modStates.put(mod.getModId(), ModState.DISABLED); } } eventChannels = eventBus.build(); }
private Query<MongoSensor> applyFilters(Query<MongoSensor> q, Set<PropertyFilter> filters) { if (filters == null || filters.isEmpty()) { return q; } Multimap<String, Object> map = LinkedListMultimap.create(); for (PropertyFilter f : filters) { String field = f.getField(); String value = f.getValue(); // "123" != 123 && "true" != true in MongoDB... if (field != null && value != null) { field = MongoUtils.path(MongoSensor.PROPERTIES, field); if (isTrue(value)) { map.put(field, true); } else if (isFalse(value)) { map.put(field, false); } else if (isNumeric(value)) { map.put(field, Double.valueOf(value)); } else { map.put(field, value); } } } q.disableValidation(); map.keySet() .stream() .forEach( (field) -> { q.field(field).in(map.get(field)); }); return q.enableValidation(); }
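A minimal standalone sketch of the coercion step above, assuming isTrue/isFalse/isNumeric behave as their names suggest (the coerce helper below is an illustrative stand-in, not the class's actual implementation): MongoDB stores booleans and numbers as native BSON types, so the string forms must be converted before the in() filter can match.

import com.google.common.collect.LinkedListMultimap;
import com.google.common.collect.Multimap;

public class FilterCoercionSketch {
  // Stand-in for the isTrue/isFalse/isNumeric branches in applyFilters.
  static Object coerce(String value) {
    if ("true".equalsIgnoreCase(value)) return Boolean.TRUE;
    if ("false".equalsIgnoreCase(value)) return Boolean.FALSE;
    try {
      return Double.valueOf(value);
    } catch (NumberFormatException e) {
      return value; // not numeric: keep the raw string
    }
  }

  public static void main(String[] args) {
    Multimap<String, Object> map = LinkedListMultimap.create();
    map.put("properties.active", coerce("true")); // Boolean true, not the string "true"
    map.put("properties.height", coerce("123")); // Double 123.0, not the string "123"
    map.put("properties.name", coerce("station-7")); // left as a String
    System.out.println(map); // {properties.active=[true], properties.height=[123.0], properties.name=[station-7]}
  }
}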
/** * Connects cfgNode to the proper CATCH block if target subtree might throw an exception. If there * are FINALLY blocks reached before a CATCH, it will make the corresponding entry in finallyMap. */ private void connectToPossibleExceptionHandler(Node cfgNode, Node target) { if (mayThrowException(target) && !exceptionHandler.isEmpty()) { Node lastJump = cfgNode; for (Node handler : exceptionHandler) { if (NodeUtil.isFunction(handler)) { return; } Preconditions.checkState(handler.getType() == Token.TRY); Node catchBlock = NodeUtil.getCatchBlock(handler); if (!NodeUtil.hasCatchHandler(catchBlock)) { // No catch but a FINALLY. if (lastJump == cfgNode) { createEdge(cfgNode, Branch.ON_EX, handler.getLastChild()); } else { finallyMap.put(lastJump, handler.getLastChild()); } } else { // Has a catch. if (lastJump == cfgNode) { createEdge(cfgNode, Branch.ON_EX, catchBlock); return; } else { finallyMap.put(lastJump, catchBlock); } } lastJump = handler; } } }
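The deferred-edge idiom above (also used by the handleReturn and handleContinue methods in this section) is easy to miss: only the first handler on the path gets a direct edge, and every later hop is parked in finallyMap until that FINALLY block is itself traversed. A simplified, runnable sketch with strings standing in for the compiler's Node type:

import com.google.common.collect.LinkedHashMultimap;
import com.google.common.collect.Multimap;
import java.util.List;

public class DeferredEdgeSketch {
  public static void main(String[] args) {
    List<String> handlers = List.of("finally-1", "finally-2", "finally-3");
    Multimap<String, String> finallyMap = LinkedHashMultimap.create();
    String source = "throwing-node";
    String lastJump = source;
    for (String handler : handlers) {
      if (lastJump.equals(source)) {
        System.out.println("edge " + source + " -> " + handler); // direct edge: createEdge(...)
      } else {
        finallyMap.put(lastJump, handler); // deferred until lastJump's FINALLY is traversed
      }
      lastJump = handler;
    }
    System.out.println(finallyMap); // {finally-1=[finally-2], finally-2=[finally-3]}
  }
}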
public Iterable<TypeAdapterTypes> typeAdapters() { Multimap<AbstractDeclaring, ValueType> byDeclaring = HashMultimap.create(); for (ValueType value : values().values()) { Protoclass protoclass = value.constitution.protoclass(); if (protoclass.kind().isValue()) { Optional<AbstractDeclaring> typeAdaptersProvider = protoclass.typeAdaptersProvider(); if (typeAdaptersProvider.isPresent()) { byDeclaring.put(typeAdaptersProvider.get(), value); } else if (protoclass.gsonTypeAdapters().isPresent() && protoclass.declaringType().isPresent()) { DeclaringType topLevel = protoclass.declaringType().get().associatedTopLevel(); byDeclaring.put(topLevel, value); } } } ImmutableList.Builder<TypeAdapterTypes> builder = ImmutableList.builder(); for (Entry<AbstractDeclaring, Collection<ValueType>> entry : byDeclaring.asMap().entrySet()) { builder.add( ImmutableTypeAdapterTypes.builder() .definedBy(entry.getKey()) .addAllTypes(entry.getValue()) .build()); } return builder.build(); }
private void handleReturn(Node node) { Node lastJump = null; for (Iterator<Node> iter = exceptionHandler.iterator(); iter.hasNext(); ) { Node curHandler = iter.next(); if (NodeUtil.isFunction(curHandler)) { break; } if (NodeUtil.hasFinally(curHandler)) { if (lastJump == null) { createEdge(node, Branch.UNCOND, curHandler.getLastChild()); } else { finallyMap.put(lastJump, computeFallThrough(curHandler.getLastChild())); } lastJump = curHandler; } } if (node.hasChildren()) { connectToPossibleExceptionHandler(node, node.getFirstChild()); } if (lastJump == null) { createEdge(node, Branch.UNCOND, null); } else { finallyMap.put(lastJump, null); } }
public GraphIndex(Graph graph) { LOG.info("Indexing graph..."); for (String feedId : graph.getFeedIds()) { for (Agency agency : graph.getAgencies(feedId)) { Map<String, Agency> agencyForId = agenciesForFeedId.getOrDefault(feedId, new HashMap<>()); agencyForId.put(agency.getId(), agency); this.agenciesForFeedId.put(feedId, agencyForId); } } Collection<Edge> edges = graph.getEdges(); /* We will keep a separate set of all vertices in case some have the same label. * Maybe we should just guarantee unique labels. */ Set<Vertex> vertices = Sets.newHashSet(); for (Edge edge : edges) { vertices.add(edge.getFromVertex()); vertices.add(edge.getToVertex()); if (edge instanceof TablePatternEdge) { TablePatternEdge patternEdge = (TablePatternEdge) edge; TripPattern pattern = patternEdge.getPattern(); patternForId.put(pattern.code, pattern); } } for (Vertex vertex : vertices) { vertexForId.put(vertex.getLabel(), vertex); if (vertex instanceof TransitStop) { TransitStop transitStop = (TransitStop) vertex; Stop stop = transitStop.getStop(); stopForId.put(stop.getId(), stop); stopVertexForStop.put(stop, transitStop); stopsForParentStation.put(stop.getParentStation(), stop); } } for (TransitStop stopVertex : stopVertexForStop.values()) { Envelope envelope = new Envelope(stopVertex.getCoordinate()); stopSpatialIndex.insert(envelope, stopVertex); } for (TripPattern pattern : patternForId.values()) { patternsForFeedId.put(pattern.getFeedId(), pattern); patternsForRoute.put(pattern.route, pattern); for (Trip trip : pattern.getTrips()) { patternForTrip.put(trip, pattern); tripForId.put(trip.getId(), trip); } for (Stop stop : pattern.getStops()) { patternsForStop.put(stop, pattern); } } for (Route route : patternsForRoute.asMap().keySet()) { routeForId.put(route.getId(), route); } // Copy these two service indexes from the graph until we have better ones. calendarService = graph.getCalendarService(); serviceCodes = graph.serviceCodes; this.graph = graph; LOG.info("Done indexing graph."); }
private void handleContinue(Node node) { String label = null; if (node.hasChildren()) { label = node.getFirstChild().getString(); } Node cur; Node lastJump; // Similar to handleBreak's logic, with a few minor variations. Node parent = node.getParent(); for (cur = node, lastJump = node; !isContinueTarget(cur, parent, label); cur = parent, parent = parent.getParent()) { if (cur.getType() == Token.TRY && NodeUtil.hasFinally(cur)) { if (lastJump == node) { createEdge(lastJump, Branch.UNCOND, cur.getLastChild()); } else { finallyMap.put(lastJump, computeFallThrough(cur.getLastChild())); } lastJump = cur; } Preconditions.checkState(parent != null, "Cannot find continue target."); } // Only a for(init; cond; update) loop has four children; continue jumps to its update expression. Node iter = cur; if (cur.getChildCount() == 4) { iter = cur.getFirstChild().getNext().getNext(); } if (lastJump == node) { createEdge(node, Branch.UNCOND, iter); } else { finallyMap.put(lastJump, iter); } }
@Override public void process(AmazonUnboxItem item) { if (AmazonUnboxItem.isBrand(item)) { String uri = AmazonUnboxContentExtractor.createBrandUri(item.getAsin()); contentUriToTitleMap.put(uri, item.getTitle()); } if (AmazonUnboxItem.isSeries(item)) { String uri = AmazonUnboxContentExtractor.createSeriesUri(item.getAsin()); if (item.getSeriesAsin() != null) { String brandUri = AmazonUnboxContentExtractor.createBrandUri(item.getSeriesAsin()); // use the guarded parent ASIN, not the item's own ASIN brandUriToSeriesUrisMap.put(brandUri, uri); } contentUriToTitleMap.put(uri, item.getTitle()); } if (AmazonUnboxItem.isEpisode(item)) { String uri = AmazonUnboxContentExtractor.createEpisodeUri(item.getAsin()); if (item.getSeasonAsin() != null) { String seriesUri = AmazonUnboxContentExtractor.createSeriesUri(item.getSeasonAsin()); seriesUriToEpisodeUrisMap.put(seriesUri, uri); } if (item.getSeriesAsin() != null) { String brandUri = AmazonUnboxContentExtractor.createBrandUri(item.getSeriesAsin()); // use the guarded parent ASIN, not the item's own ASIN brandUriToSeriesUrisMap.put(brandUri, uri); } contentUriToTitleMap.put(uri, item.getTitle()); } }
public void errorOccurred(ModContainer modContainer, Throwable exception) { if (exception instanceof InvocationTargetException) { errors.put(modContainer.getModId(), ((InvocationTargetException) exception).getCause()); } else { errors.put(modContainer.getModId(), exception); } }
public <S, P, R, D extends Pda<S, P>> D filterEdges( Pda<S, P> pda, Traverser<? super Pda<S, P>, S, R> traverser, PdaFactory<D, S, P, S> factory) { HashStack<TraversalItem<S, R>> trace = new HashStack<TraversalItem<S, R>>(); R previous = traverser.enter(pda, pda.getStart(), null); if (previous == null) return factory == null ? null : factory.create(pda.getStart(), pda.getStop()); Map<S, Integer> distances = new NfaUtil().distanceToFinalStateMap(pda); MappedComparator<S, Integer> distanceComp = new MappedComparator<S, Integer>(distances); trace.push(newItem(pda, distanceComp, distances, pda.getStart(), previous)); Multimap<S, S> edges = LinkedHashMultimap.create(); HashSet<S> states = Sets.newLinkedHashSet(); HashSet<Pair<S, R>> success = Sets.newLinkedHashSet(); states.add(pda.getStart()); states.add(pda.getStop()); ROOT: while (!trace.isEmpty()) { TraversalItem<S, R> current = trace.peek(); while (current.followers.hasNext()) { S next = current.followers.next(); R item = traverser.enter(pda, next, current.data); if (item != null) { if ((next == pda.getStop() && traverser.isSolution(item)) || success.contains(Tuples.create(next, item))) { S s = null; for (TraversalItem<S, R> i : trace) { if (s != null) edges.put(s, i.state); states.add(i.state); success.add(Tuples.create(i.state, i.data)); s = i.state; } edges.put(s, next); } else { if (trace.push(newItem(pda, distanceComp, distances, next, item))) continue ROOT; } } } trace.pop(); } if (factory == null) return null; D result = factory.create(pda.getStart(), pda.getStop()); Map<S, S> old2new = Maps.newLinkedHashMap(); old2new.put(pda.getStart(), result.getStart()); old2new.put(pda.getStop(), result.getStop()); for (S old : states) { if (old == pda.getStart() || old == pda.getStop()) continue; else if (pda.getPop(old) != null) old2new.put(old, factory.createPop(result, old)); else if (pda.getPush(old) != null) old2new.put(old, factory.createPush(result, old)); else old2new.put(old, factory.createState(result, old)); } for (S old : states) { List<S> followers = Lists.newArrayList(); for (S f : edges.get(old)) followers.add(old2new.get(f)); factory.setFollowers(result, old2new.get(old), followers); } return result; }
@Test public void whenCreateMultimap_thenCreated() { final Multimap<String, String> multimap = ArrayListMultimap.create(); multimap.put("fruit", "apple"); multimap.put("fruit", "banana"); multimap.put("pet", "cat"); multimap.put("pet", "dog"); assertThat(multimap.get("fruit"), containsInAnyOrder("apple", "banana")); assertThat(multimap.get("pet"), containsInAnyOrder("cat", "dog")); }
@Before public void setUp() throws Exception { Multimap<String, String> headers = LinkedListMultimap.<String, String>create(); headers.put("content-type", "image/png"); headers.put("filename", "x.png"); headers.put("x-rftest", "set"); headers.put("x-rftest", "of"); headers.put("x-rftest", "values"); uploadResource = environment.createMock(MockUploadResource.class); uploadedFile = new UploadedFile25("form:fileUpload", uploadResource, headers); }
/** * Registers a check for a specified token name. * * @param token the name of the token * @param check the check to register */ private void registerCheck(String token, Check check) { if (check.isCommentNodesRequired()) { tokenToCommentChecks.put(token, check); } else if (Utils.isCommentType(token)) { final String message = String.format( "Check '%s' expects a comment-type token ('%s') and should override the " + "'isCommentNodesRequired()' method to return 'true'", check.getClass().getName(), token); LOG.warn(message); } else { tokenToOrdinaryChecks.put(token, check); } }
/** * For unique local packages. Returns the cached LocalPkgInfo for the requested type. Loads it * from disk if not cached. * * @param filter {@link PkgType#PKG_TOOLS}, {@link PkgType#PKG_PLATFORM_TOOLS}, {@link PkgType#PKG_DOC} or {@link PkgType#PKG_NDK}. * @return null if the package is not installed. */ @Nullable public LocalPkgInfo getPkgInfo(@NonNull PkgType filter) { if (filter != PkgType.PKG_TOOLS && filter != PkgType.PKG_PLATFORM_TOOLS && filter != PkgType.PKG_DOC && filter != PkgType.PKG_NDK) { assert false; return null; } LocalPkgInfo info = null; synchronized (mLocalPackages) { Collection<LocalPkgInfo> existing = mLocalPackages.get(filter); assert existing.size() <= 1; if (!existing.isEmpty()) { return existing.iterator().next(); } File uniqueDir = new File(mSdkRoot, filter.getFolderName()); if (!mVisitedDirs.containsEntry(filter, new LocalDirInfo.MapComparator(uniqueDir))) { switch (filter) { case PKG_TOOLS: info = scanTools(uniqueDir); break; case PKG_PLATFORM_TOOLS: info = scanPlatformTools(uniqueDir); break; case PKG_DOC: info = scanDoc(uniqueDir); break; case PKG_NDK: info = scanNdk(uniqueDir); break; default: break; } } // Whether we have found a valid pkg or not, this directory has been visited. mVisitedDirs.put(filter, new LocalDirInfo(mFileOp, uniqueDir)); if (info != null) { mLocalPackages.put(filter, info); } } return info; }
// Entry format "revision;member;renamedMethod" public void executeOptimizedRenamings(String csvFile) { try { // grouping entries that share the same revision Multimap<String, String> groups = ArrayListMultimap.create(); File revFile = new File(csvFile); BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(new FileInputStream(revFile.getAbsolutePath()))); String line; while ((line = bufferedReader.readLine()) != null) { String[] columns = line.split(";"); String revDir = columns[0]; groups.put(revDir, line); } bufferedReader.close(); // executing entries that share the same revision ArrayList<String> sharedRevisionEntryList = new ArrayList<String>(); for (String keyRevDir : groups.keySet()) { sharedRevisionEntryList.addAll(groups.get(keyRevDir)); executeGroupedEntries(sharedRevisionEntryList); sharedRevisionEntryList.clear(); } } catch (IOException e1) { e1.printStackTrace(); } }
static Set<String> sanityCheckDatacenters( Cassandra.Client client, int desiredRf, boolean safetyDisabled) throws InvalidRequestException, TException { ensureTestKeyspaceExists(client); Multimap<String, String> dataCenterToRack = HashMultimap.create(); List<TokenRange> ring = client.describe_ring(CassandraConstants.SIMPLE_RF_TEST_KEYSPACE); for (TokenRange tokenRange : ring) { for (EndpointDetails details : tokenRange.getEndpoint_details()) { dataCenterToRack.put(details.datacenter, details.rack); } } if (dataCenterToRack.size() == 1) { String dc = dataCenterToRack.keySet().iterator().next(); String rack = dataCenterToRack.values().iterator().next(); if (dc.equals(CassandraConstants.DEFAULT_DC) && rack.equals(CassandraConstants.DEFAULT_RACK) && desiredRf > 1) { // We don't allow greater than RF=1 because they didn't set up their network. logErrorOrThrow( "The cassandra cluster is not set up to be datacenter and rack aware. " + "Please set this up before running with a replication factor higher than 1.", safetyDisabled); } } return dataCenterToRack.keySet(); }
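One subtlety worth noting in the check above: Guava's Multimap.size() counts key-value entries rather than distinct keys, which is exactly why dataCenterToRack.size() == 1 detects a ring that reported a single datacenter/rack pair. A standalone illustration with made-up names:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;

public class MultimapSizeSketch {
  public static void main(String[] args) {
    Multimap<String, String> dcToRack = HashMultimap.create();
    dcToRack.put("datacenter1", "rack1");
    System.out.println(dcToRack.size()); // 1: one datacenter/rack pair in the whole ring
    dcToRack.put("datacenter1", "rack2");
    System.out.println(dcToRack.size()); // 2: same datacenter, but now rack aware
    System.out.println(dcToRack.keySet().size()); // still 1 distinct datacenter
  }
}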
private static Multimap<String, Symbol> sortByName(List<Symbol> members) { Multimap<String, Symbol> membersByName = LinkedListMultimap.create(); for (Symbol member : members) { membersByName.put(member.name(), member); } return membersByName; }
private void checkAssignment(AbstractElement object, String feature) { if (assignedFeatures.containsKey(feature)) { Collection<AbstractElement> sources = Lists.newArrayList(assignedFeatures.get(feature)); assignedFeatures.replaceValues(feature, Collections.<AbstractElement>emptyList()); if (sources.equals(Collections.singletonList(object))) { if (getNestingLevel() == 0) acceptWarning( "The assigned value of feature '" + feature + "' will possibly override itself because it is used inside of a loop.", object, null); } else if (getNestingLevel() == 0) { for (AbstractElement source : sources) acceptWarning( "The possibly assigned value of feature '" + feature + "' may be overridden by subsequent assignments.", source, null); acceptWarning( "This assignment will override the possibly assigned value of feature '" + feature + "'.", object, null); } } else { assignedFeatures.put(feature, object); } }
public Multimap func_111205_h() { Multimap var1 = super.func_111205_h(); var1.put( SharedMonsterAttributes.field_111264_e.func_111108_a(), new AttributeModifier(field_111210_e, "Tool modifier", (double) this.field_77865_bY, 0)); return var1; }
/** Gets a map of item attribute modifiers, used by ItemSword to increase hit damage. */ public Multimap getItemAttributeModifiers() { Multimap multimap = super.getItemAttributeModifiers(); multimap.put( SharedMonsterAttributes.attackDamage.getAttributeUnlocalizedName(), new AttributeModifier(field_111210_e, "Tool modifier", (double) this.damageVsEntity, 0)); return multimap; }
public Multimap<Node, Split> computeAssignments(Set<Split> splits) { Multimap<Node, Split> assignment = HashMultimap.create(); for (Split split : splits) { List<Node> candidateNodes; if (locationAwareScheduling) { candidateNodes = selectCandidateNodes(nodeMap.get().get(), split); } else { candidateNodes = selectRandomNodes(minCandidates); } checkCondition( !candidateNodes.isEmpty(), NO_NODES_AVAILABLE, "No nodes available to run query"); Node chosen = null; int min = Integer.MAX_VALUE; for (Node node : candidateNodes) { RemoteTask task = taskMap.get(node); int currentSplits = (task == null) ? 0 : task.getQueuedSplits(); int assignedSplits = currentSplits + assignment.get(node).size(); if (assignedSplits < min && assignedSplits < maxPendingSplitsPerTask) { chosen = node; min = assignedSplits; } } if (chosen != null) { assignment.put(chosen, split); } } return assignment; }
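A self-contained sketch of the least-loaded selection loop above, using strings for nodes and splits and ignoring the RemoteTask queue (so currentSplits is effectively zero); the names and the cap value are illustrative, not Presto's actual configuration:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.List;

public class LeastLoadedSketch {
  static final int MAX_PENDING = 2; // stands in for maxPendingSplitsPerTask

  public static void main(String[] args) {
    List<String> nodes = List.of("node-a", "node-b");
    Multimap<String, String> assignment = HashMultimap.create();
    for (String split : List.of("s1", "s2", "s3", "s4", "s5")) {
      String chosen = null;
      int min = Integer.MAX_VALUE;
      for (String node : nodes) {
        int assigned = assignment.get(node).size(); // queued splits omitted in this sketch
        if (assigned < min && assigned < MAX_PENDING) {
          chosen = node;
          min = assigned;
        }
      }
      if (chosen != null) {
        assignment.put(chosen, split); // s5 is dropped once both nodes reach the cap
      }
    }
    System.out.println(assignment);
  }
}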
/** * Gets the feed mapping for a feed. * * @return a multimap from feed attribute ID to the set of field IDs mapped to the attribute */ private static Multimap<Long, Integer> getFeedMapping( AdWordsServices adWordsServices, AdWordsSession session, Feed feed, long placeholderType) throws Exception { // Get the FeedMappingService. FeedMappingServiceInterface feedMappingService = adWordsServices.get(session, FeedMappingServiceInterface.class); String query = String.format( "SELECT FeedMappingId, AttributeFieldMappings WHERE FeedId = %d and PlaceholderType = %d " + "AND Status = 'ENABLED'", feed.getId(), placeholderType); Multimap<Long, Integer> attributeMappings = HashMultimap.create(); int offset = 0; FeedMappingPage feedMappingPage; do { String pageQuery = String.format(query + " LIMIT %d, %d", offset, PAGE_SIZE); feedMappingPage = feedMappingService.query(pageQuery); if (feedMappingPage.getEntries() != null) { // Normally, a feed attribute is mapped only to one field. However, you may map it to more // than one field if needed. for (FeedMapping feedMapping : feedMappingPage.getEntries()) { for (AttributeFieldMapping attributeMapping : feedMapping.getAttributeFieldMappings()) { attributeMappings.put( attributeMapping.getFeedAttributeId(), attributeMapping.getFieldId()); } } } offset += PAGE_SIZE; } while (offset < feedMappingPage.getTotalNumEntries()); return attributeMappings; }
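A hedged usage sketch of the returned multimap: one feed attribute may map to several field IDs, so consumers typically iterate asMap() to see each attribute's full set. The IDs below are made up, not values from a live FeedMappingService:

import com.google.common.collect.HashMultimap;
import com.google.common.collect.Multimap;
import java.util.Collection;
import java.util.Map;

public class FeedMappingSketch {
  public static void main(String[] args) {
    Multimap<Long, Integer> attributeMappings = HashMultimap.create();
    attributeMappings.put(1L, 1); // attribute 1 mapped to one field...
    attributeMappings.put(1L, 3); // ...and to a second field
    attributeMappings.put(2L, 2);
    for (Map.Entry<Long, Collection<Integer>> e : attributeMappings.asMap().entrySet()) {
      System.out.println("attribute " + e.getKey() + " -> fields " + e.getValue());
    }
  }
}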
private ImmutableList<Path> conditionallyCopy(ImmutableList<Path> roots) throws IOException { final Builder<Path> builder = ImmutableList.builder(); for (Path root : roots) { Preconditions.checkArgument( root.startsWith(workingDirectory), root + " must start with root " + workingDirectory + " from " + roots); Preconditions.checkArgument( !root.equals(workingDirectory), "Cannot deduplicate root directory: " + root + " from " + roots); if (!seen.containsKey(root)) { seen.put(root, null); final Path newRoot = out.resolve(workingDirectory.relativize(root)); Files.walkFileTree( root, ImmutableSet.of(FileVisitOption.FOLLOW_LINKS), Integer.MAX_VALUE, new ConditionalCopyVisitor(newRoot, root, seen, hashFunction)); builder.add(newRoot); } else { // Duplicated directories are ok -- multiple files from different libraries // can reside in the same directory, but duplicate files should not be seen multiple times. } } return builder.build(); }
private Multimap<String, TaggedRegion> makeTags(Collection<TaggedRegion> tags) { Multimap<String, TaggedRegion> ret = ArrayListMultimap.create(); for (TaggedRegion r : tags) { ret.put(r.getName(), r); } return ret; }
protected void updateSuperProjects(ReviewDb db, Set<Branch.NameKey> updatedBranches) throws SubmoduleException { try { // These (repo/branch) will be updated later with all the given // individual submodule subscriptions Multimap<Branch.NameKey, SubmoduleSubscription> targets = HashMultimap.create(); for (Branch.NameKey updatedBranch : updatedBranches) { for (SubmoduleSubscription sub : db.submoduleSubscriptions().bySubmodule(updatedBranch)) { targets.put(sub.getSuperProject(), sub); } } updatedSubscribers.addAll(updatedBranches); // Update subscribers. for (Branch.NameKey dest : targets.keySet()) { try { if (!updatedSubscribers.add(dest)) { log.error("Possible circular subscription involving " + dest); } else { updateGitlinks(db, dest, targets.get(dest)); } } catch (SubmoduleException e) { log.warn("Cannot update gitlinks for " + dest, e); } } } catch (OrmException e) { logAndThrowSubmoduleException("Cannot read subscription records", e); } }
public String configFile() { // Check template URL exists String templateUrl = driver.getEntity().getConfig(NginxController.SERVER_CONF_TEMPLATE_URL); ResourceUtils.create(this).checkUrlExists(templateUrl); // Check SSL configuration ProxySslConfig ssl = driver.getEntity().getConfig(NginxController.SSL_CONFIG); if (ssl != null && Strings.isEmpty(ssl.getCertificateDestination()) && Strings.isEmpty(ssl.getCertificateSourceUrl())) { throw new IllegalStateException( "ProxySslConfig can't have a null certificateDestination and null certificateSourceUrl. One or both need to be set"); } // For mapping by URL Iterable<UrlMapping> mappings = ((NginxController) driver.getEntity()).getUrlMappings(); Multimap<String, UrlMapping> mappingsByDomain = LinkedHashMultimap.create(); for (UrlMapping mapping : mappings) { Collection<String> addrs = mapping.getAttribute(UrlMapping.TARGET_ADDRESSES); if (addrs != null && addrs.size() > 0) { mappingsByDomain.put(mapping.getDomain(), mapping); } } Map<String, Object> substitutions = MutableMap.<String, Object>builder() .putIfNotNull("ssl", ssl) .put("urlMappings", mappings) .put("domainMappings", mappingsByDomain) .build(); // Get template contents and process String contents = ResourceUtils.create(driver.getEntity()).getResourceAsString(templateUrl); return TemplateProcessor.processTemplateContents(contents, driver, substitutions); }
private List<Cluster> doPrivilegedLookup(String partitionName, String vmTypeName) throws NotEnoughResourcesException { if (Partition.DEFAULT_NAME.equals(partitionName)) { Iterable<Cluster> authorizedClusters = Iterables.filter( Clusters.getInstance().listValues(), RestrictedTypes.filterPrivilegedWithoutOwner()); Multimap<VmTypeAvailability, Cluster> sorted = TreeMultimap.create(); for (Cluster c : authorizedClusters) { sorted.put(c.getNodeState().getAvailability(vmTypeName), c); } if (sorted.isEmpty()) { throw new NotEnoughResourcesException( "Not enough resources: no availability zone is available in which you have permissions to run instances."); } else { return Lists.newArrayList(sorted.values()); } } else { ServiceConfiguration ccConfig = Topology.lookup(ClusterController.class, Partitions.lookupByName(partitionName)); Cluster cluster = Clusters.lookup(ccConfig); if (cluster == null) { throw new NotEnoughResourcesException("Can't find cluster " + partitionName); } if (!RestrictedTypes.filterPrivilegedWithoutOwner().apply(cluster)) { throw new NotEnoughResourcesException("Not authorized to use cluster " + partitionName); } return Lists.newArrayList(cluster); } }
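The choice of TreeMultimap above is what makes Lists.newArrayList(sorted.values()) meaningful: keys iterate in comparator order, so clusters come out grouped by availability. A small standalone demonstration (Integer and String stand in for VmTypeAvailability and Cluster):

import com.google.common.collect.TreeMultimap;

public class SortedClustersSketch {
  public static void main(String[] args) {
    TreeMultimap<Integer, String> sorted = TreeMultimap.create();
    sorted.put(5, "cluster-a"); // availability 5
    sorted.put(2, "cluster-b"); // availability 2
    sorted.put(2, "cluster-c");
    // Keys ascend, values within a key ascend: [cluster-b, cluster-c, cluster-a]
    System.out.println(sorted.values());
  }
}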
private void callParserDefault( String[] sources, String[] encodings, String[] classPaths, String rootDir, String className, String renamedMethod, Multimap<String, String> invokers) throws IOException { File directory = new File(rootDir); File[] fList = directory.listFiles(); if (fList == null) { // rootDir is not a directory or cannot be read return; } for (File file : fList) { if (file.isDirectory()) { callParserDefault( sources, encodings, classPaths, file.getAbsolutePath(), className, renamedMethod, invokers); } else { if (FilenameUtils.getExtension(file.getAbsolutePath()).equalsIgnoreCase("java")) { List<String> invocations = createParserAndFindMethodReferences( sources, encodings, classPaths, file, className, renamedMethod); for (String method : invocations) { invokers.put(file.getAbsolutePath(), method); } } } } }