/**
 * Filter a list of VolumeDescriptors by type(s).
 *
 * @param descriptors -- Original list.
 * @param inclusive -- Types to be included (or null if not used).
 * @param exclusive -- Types to be excluded (or null if not used).
 * @return List<VolumeDescriptor> descriptors passing both filters (never null)
 */
public static List<VolumeDescriptor> filterByType(
    List<VolumeDescriptor> descriptors, Type[] inclusive, Type[] exclusive) {
  List<VolumeDescriptor> filtered = new ArrayList<VolumeDescriptor>();
  if (descriptors == null) {
    return filtered;
  }
  // Null arrays are treated as "no constraint" by leaving the sets empty.
  HashSet<Type> wanted = new HashSet<Type>();
  if (inclusive != null) {
    wanted.addAll(Arrays.asList(inclusive));
  }
  HashSet<Type> unwanted = new HashSet<Type>();
  if (exclusive != null) {
    unwanted.addAll(Arrays.asList(exclusive));
  }
  for (VolumeDescriptor descriptor : descriptors) {
    Type type = descriptor.getType();
    // Exclusion wins; an empty inclusion set means "include everything not excluded".
    if (!unwanted.contains(type) && (wanted.isEmpty() || wanted.contains(type))) {
      filtered.add(descriptor);
    }
  }
  return filtered;
}
/**
 * Builds WasInfluencedBy statements for a qualified Influence node.
 *
 * <p>Gathers every influencer asserted on the node (via prov:influencer, prov:agent,
 * prov:entity or prov:activity), every influencee that references the node through
 * prov:qualifiedInfluence, and the node's attributes, then creates and stores one
 * WasInfluencedBy statement per (influencee, influencer) combination.
 *
 * @param context the named graph / context being read
 * @param qualifiedName the qualified Influence node
 * @return the list of WasInfluencedBy statements created (each is also stored)
 */
private List<WasInfluencedBy> createInfluence(
    QualifiedName context, QualifiedName qualifiedName) {
  // An influencer may be expressed with any of these four PROV-O properties;
  // collect all of them into one de-duplicated set.
  HashSet<QualifiedName> all_influencers = new HashSet<QualifiedName>();
  List<QualifiedName> influencers =
      getObjects(context, qualifiedName, onto.QualifiedName_PROVO_influencer);
  List<QualifiedName> agents =
      getObjects(context, qualifiedName, onto.QualifiedName_PROVO_agent);
  List<QualifiedName> entities =
      getObjects(context, qualifiedName, onto.QualifiedName_PROVO_entity);
  List<QualifiedName> activities =
      getObjects(context, qualifiedName, onto.QualifiedName_PROVO_activity);
  all_influencers.addAll(influencers);
  all_influencers.addAll(agents);
  all_influencers.addAll(entities);
  all_influencers.addAll(activities);
  // Subjects pointing at this node through prov:qualifiedInfluence are the influencees.
  List<QualifiedName> influencees =
      getSubjects(context, onto.QualifiedName_PROVO_qualifiedInfluence, qualifiedName);
  List<Attribute> attributes =
      collectAttributes(context, qualifiedName, Types.ProvType.INFLUENCE);
  qualifiedName = getQualqualifiedName(qualifiedName);
  List<WasInfluencedBy> wibs = new ArrayList<WasInfluencedBy>();
  // Presumably the cross product of influencees x influencers, each pair as a 2-element
  // list — TODO confirm against permute()'s contract.
  List<List<?>> perms = permute(influencees, new ArrayList<QualifiedName>(all_influencers));
  for (List<?> perm : perms) {
    WasInfluencedBy wib =
        pFactory.newWasInfluencedBy(
            qualifiedName, (QualifiedName) perm.get(0), (QualifiedName) perm.get(1), attributes);
    store(context, wib);
    wibs.add(wib);
  }
  return wibs;
}
public static void updateSensors(MMMinecart minecart, MMMinecart input) { HashSet<Block> blockList = minecart.getAdjacentBlocks(1); blockList.addAll(minecart.getBlocksBeneath(3)); HashSet<Block> oldBlockList = minecart.getPreviousLocationAdjacentBlocks(1); oldBlockList.addAll(minecart.getPreviousLocationBlocksBeneath(3)); oldBlockList.removeAll(blockList); // Activate new sensors for (Block block : blockList) { Sensor s = SensorManager.getSensor(block); if (s != null) { try { s.input(input); } catch (Exception e) { SensorManager.delSensor(s.getLocation()); } } } // deactivate old sensors for (Block block : oldBlockList) { Sensor s = SensorManager.getSensor(block); if (s != null) { try { s.input(null); } catch (Exception e) { SensorManager.delSensor(s.getLocation()); } } } }
/**
 * Populates the local cache of the current user's groups, events, and co-hosts.
 *
 * <p>Works incrementally: whatever is already cached is copied into the local DTO
 * collections; for the first missing category a fetch is requested with
 * {@code populateLocalCache} as the continuation (which presumably re-runs this command —
 * TODO confirm), and the method returns early. Only when everything is present does it
 * schedule {@code updateMyStuff}.
 */
@Override
public void execute() {
  clearLocalCache();
  session = VeggieDinner.getSession();
  // No signed-in user: nothing to load.
  if (session == null || session.getUser() == null) return;
  me = VeggieDinner.getUser(session.getUser());
  if (me == null) {
    // Our own user record is not cached yet; fetch it, then retry via the callback.
    VeggieDinner.requestNewUser(session.getUser(), populateLocalCache);
    return;
  }
  boolean hasAll = true; // NOTE(review): never read after this point — looks vestigial
  // Copy everything currently cached into the local DTO collections.
  myGroupDTOs.addAll(VeggieDinner.getGroups(me.getGroups()).values());
  myEventDTOs.addAll(VeggieDinner.getEvents(me.getSchedule()).values());
  myEventDTOs.addAll(VeggieDinner.getEvents(getEventsFromGroups(myGroupDTOs)).values());
  myCohosts.addAll(VeggieDinner.getUsers(getHostsFromGroups(myGroupDTOs)).values());
  // Each check below requests whatever category is incomplete and bails out;
  // the callback is expected to bring us back here once the data has arrived.
  if (!myGroupDTOs.containsAll(me.getGroups())) {
    VeggieDinner.requestNewGroup(me.getGroups(), populateLocalCache);
    return;
  }
  if (!myEventDTOs.containsAll(me.getSchedule())
      || !myEventDTOs.containsAll(getEventsFromGroups(myGroupDTOs))) {
    // Request the union of group events and the user's own schedule in one round trip.
    HashSet<Key<EventDTO>> events = getEventsFromGroups(myGroupDTOs);
    events.addAll(me.getSchedule());
    VeggieDinner.requestNewEvent(events, populateLocalCache);
    return;
  }
  if (!myCohosts.containsAll(getHostsFromGroups(myGroupDTOs))) {
    VeggieDinner.requestNewUser(getHostsFromGroups(myGroupDTOs), populateLocalCache);
    return;
  }
  // Everything is cached; defer the UI/state update.
  DeferredCommand.addCommand(updateMyStuff);
}
/**
 * Merges this class hierarchy with another one built from jars.
 *
 * <p>If either side's jar set subsumes the other, the larger hierarchy is returned as-is;
 * otherwise a new hierarchy is constructed over the union of both jar sets.
 *
 * @param ch the hierarchy to merge with
 * @return a hierarchy covering the jars of both inputs
 * @throws UnsupportedOperationException if {@code ch} is not a ClassHierarchyImpl
 */
@Override
public synchronized ClassHierarchy merge(final ClassHierarchy ch) {
  // Merging with ourselves is a no-op.
  if (this == ch) {
    return this;
  }
  if (!(ch instanceof ClassHierarchyImpl)) {
    throw new UnsupportedOperationException(
        "Can't merge java and non-java class hierarchies yet!");
  }
  // With no jars of our own, the other hierarchy already covers everything we know.
  if (this.jars.size() == 0) {
    return ch;
  }
  final ClassHierarchyImpl other = (ClassHierarchyImpl) ch;
  final HashSet<URL> theirJars = new HashSet<>(other.jars);
  final HashSet<URL> ourJars = new HashSet<>(this.jars);
  if (ourJars.containsAll(theirJars)) {
    return this;
  }
  if (theirJars.containsAll(ourJars)) {
    return ch;
  }
  // Neither side subsumes the other: build a fresh hierarchy over the union.
  ourJars.addAll(theirJars);
  return new ClassHierarchyImpl(ourJars.toArray(new URL[0]));
}
/** * Uses recurson to get the depth of the input synset (in 'getSynsetDepth()') from a <root> A * synset may have multiple parents, thus we returneach possible depth and 'home hierarchy' <root> * Thus, we may have the same <root> at different depths in the WordNet hierarchy */ private void treecreeper( int depth, HashSet<ISynsetID> synsets, TreeMap<Integer, HashSet<ISynsetID>> depths, ArrayList<Integer> roots) { depth++; ISynset synset = null; HashSet<ISynsetID> hypernyms = new HashSet<ISynsetID>(); // next 'level'(inverse of 'depth') for (ISynsetID s : synsets) { synset = dict.getSynset(s); hypernyms.addAll(synset.getRelatedSynsets(Pointer.HYPERNYM)); // get the <hypernyms> hypernyms.addAll( synset.getRelatedSynsets(Pointer.HYPERNYM_INSTANCE)); // get the <hypernyms> (instances) } if (!hypernyms.isEmpty()) { for (ISynsetID h : hypernyms) { int offset = h.getOffset(); if (roots.contains(offset)) { if (depths.containsKey(depth)) { HashSet<ISynsetID> deep = depths.get(depth); deep.add(h); depths.put(depth, deep); } else { HashSet<ISynsetID> deep = new HashSet<ISynsetID>(); deep.add(h); depths.put(depth, deep); } } } treecreeper(depth, hypernyms, depths, roots); } return; }
/**
 * Builds a head finder that distinguishes auxiliaries from copular verbs so that the NP is
 * the semantic head in sentences like "Bill is an honest man".
 *
 * @param tlp the treebank language pack
 * @param cop whether copular verbs (taking an NP complement) should be recognized
 */
public SemanticHeadFinder(TreebankLanguagePack tlp, boolean cop) {
  super(tlp);
  ruleChanges();
  // Auxiliary verb forms, including clitics ('ve, 'd, 'll).
  verbalAuxiliaries =
      new HashSet<String>(
          Arrays.asList(
              "will", "wo", "shall", "may", "might", "should", "would", "can", "could", "ca",
              "must", "has", "have", "had", "having", "be", "being", "been", "get", "gets",
              "getting", "got", "gotten", "do", "does", "did", "to", "'ve", "'d", "'ll"));
  // copular verbs having an NP complement; "s" covers the cases where the apostrophe
  // is missing on "'s".
  copulars = new HashSet<String>();
  if (cop) {
    copulars.addAll(
        Arrays.asList(
            "be", "being", "Being", "am", "are", "is", "was", "were", "'m", "'re", "'s", "s",
            "seem", "seems", "seemed", "appear", "appears", "appeared", "stay", "stays",
            "stayed", "remain", "remains", "remained", "resemble", "resembles", "resembled",
            "become", "becomes", "became"));
  }
  // Verbal POS tags, including Charniak's AUX/AUXG so BLLIP output can be handled.
  verbalTags =
      new HashSet<String>(
          Arrays.asList(
              "TO", "MD", "VB", "VBD", "VBP", "VBZ", "VBG", "VBN", "AUX", "AUXG"));
}
/**
 * Collects every property of {@code type}, discovered both via accessor methods and via
 * fields, de-duplicated into a set.
 *
 * @param type the class type to inspect
 * @return the distinct properties of the type
 */
public Collection<Property> getProperties(JClassType type) {
  HashSet<Property> result = new HashSet<Property>();
  result.addAll(MethodProperty.findProperties(type));
  result.addAll(FieldProperty.findProperties(type));
  return result;
}
/**
 * Returns the combined unstemmed stop-word filter set, built lazily on first call from the
 * Smart and Letter stop-word lists and cached thereafter.
 *
 * @return the cached union of both unstemmed filter sets
 */
public HashSet<String> getUnstemmedFilterSet() {
  // Build the union once, on first request.
  if (unstemmedfilterset == null) {
    unstemmedfilterset = new HashSet<String>();
    unstemmedfilterset.addAll(StopWordsSmart.getUnstemmedFilterSet());
    unstemmedfilterset.addAll(StopWordsLetter.getUnstemmedFilterSet());
  }
  return unstemmedfilterset;
}
/** Returns all GeoElement objects found in the x, y and z coordinate subtrees. */
public HashSet getVariables() {
  // Start from x's variables; fall back to an empty set if x has none.
  HashSet variables = x.getVariables();
  if (variables == null) {
    variables = new HashSet();
  }
  HashSet more = y.getVariables();
  if (more != null) {
    variables.addAll(more);
  }
  more = z.getVariables();
  if (more != null) {
    variables.addAll(more);
  }
  return variables;
}
/**
 * Returns this map's keys, including the parent map's keys when a parent exists.
 *
 * <p>Without a parent the inner map's live key view is returned; with a parent a fresh
 * set holding the union of both key sets is built.
 *
 * @return the key set (a copy when a parent is present)
 */
public Set<K> keySet() {
  if (parent == null) {
    return inner.keySet();
  }
  HashSet<K> union = new HashSet<K>(parent.keySet());
  union.addAll(inner.keySet());
  return union;
}
/**
 * Finds all resources with the given name, consulting every parent class loader as well as
 * this one; duplicate URLs are collapsed via a set.
 *
 * @param name the resource name
 * @return an enumeration of the distinct matching URLs
 * @throws IOException if any delegate lookup fails
 */
@Override
public Enumeration<URL> findResources(String name) throws IOException {
  final HashSet<URL> found = new HashSet<>();
  for (PomClassLoader parent : parents) {
    found.addAll(Collections.list(parent.findResources(name)));
  }
  found.addAll(Collections.list(super.findResources(name)));
  return Collections.enumeration(found);
}
/** Get an alphabetically sorted list of field names. */
public List<String> getFieldNames() {
  // De-duplicate across stored and reconstructed fields before sorting.
  HashSet<String> unique = new HashSet<String>(storedFields.keySet());
  unique.addAll(reconstructedFields.keySet());
  ArrayList<String> sorted = new ArrayList<String>(unique);
  Collections.sort(sorted);
  return sorted;
}
/**
 * Returns the free variables of this array access: those of every index expression plus
 * those of the array expression itself.
 */
@Override
public HashSet<IdentifierExpression> getFreeVariables() {
  HashSet<IdentifierExpression> free = new HashSet<IdentifierExpression>();
  // Free variables contributed by each index expression...
  for (int idx = 0; idx < this.indices.length; idx++) {
    free.addAll(this.indices[idx].getFreeVariables());
  }
  // ...plus those of the array expression.
  free.addAll(this.array.getFreeVariables());
  return free;
}
/** * delegate for ServletContext.declareRole method * * @param roleNames role names to add */ protected void addRoles(String... roleNames) { // Get a reference to the SecurityHandler, which must be ConstraintAware if (_securityHandler != null && _securityHandler instanceof ConstraintAware) { HashSet<String> union = new HashSet<String>(); Set<String> existing = ((ConstraintAware) _securityHandler).getRoles(); if (existing != null) union.addAll(existing); union.addAll(Arrays.asList(roleNames)); ((ConstraintSecurityHandler) _securityHandler).setRoles(union); } }
/**
 * Computes the rich club of the directed graph, ranking nodes by out-degree.
 *
 * <p>Nodes are bucketed by out-degree, then buckets are consumed from highest degree
 * downward until the club holds {@code richClubSize} members; the bucket that overshoots
 * is split. Finally {@code edgesBetweenRichClub} is incremented for every edge whose
 * endpoints both lie inside the club.
 *
 * @return always {@code true}
 */
private boolean directedCompute() {
  // Distinct out-degrees; TreeSet keeps them sorted so last() is the current maximum.
  SortedSet<Integer> degrees = new TreeSet<Integer>();
  for (IElement iE : g.getNodes()) {
    DirectedNode n = (DirectedNode) iE;
    int degree = n.getOutDegree();
    degrees.add(degree);
    // Bucket each node under its out-degree.
    if (nodesSortedByDegree.containsKey(degree)) {
      this.nodesSortedByDegree.get(degree).add(n);
    } else {
      LinkedList<Node> temp = new LinkedList<>();
      temp.add(n);
      this.nodesSortedByDegree.put(degree, temp);
    }
  }
  HashSet<Node> currentRichClub = new HashSet<Node>();
  int currentRichClubSize = 0;
  int size = degrees.size();
  // Consume degree buckets from highest to lowest until the club is large enough.
  for (int i = 0; i < size; i++) {
    int currentDegree = degrees.last();
    degrees.remove(currentDegree);
    LinkedList<Node> current = this.nodesSortedByDegree.get(currentDegree);
    currentRichClubSize += current.size();
    this.nodesSortedByDegree.remove(currentDegree);
    if (currentRichClubSize >= this.richClubSize) {
      // This bucket overshoots the target: keep only enough nodes to fill the club and
      // put the remainder back under the same degree for later use.
      int seperateAT = current.size() - (currentRichClubSize - this.richClubSize);
      LinkedList<Node> temp = new LinkedList<>();
      temp.addAll(current.subList(0, seperateAT));
      this.richClub.put(currentDegree, temp);
      currentRichClub.addAll(temp);
      LinkedList<Node> temp2 = new LinkedList<>();
      temp2.addAll(current.subList(seperateAT, current.size()));
      if (!temp2.isEmpty()) this.nodesSortedByDegree.put(currentDegree, (LinkedList<Node>) temp2);
      break;
    } else {
      // Whole bucket fits; absorb it and continue with the next lower degree.
      richClub.put(currentDegree, current);
      currentRichClub.addAll(current);
    }
  }
  // Count edges fully inside the rich club.
  for (Node n : currentRichClub) {
    DirectedNode ne = (DirectedNode) n;
    for (IElement iE : ne.getOutgoingEdges()) {
      DirectedEdge e = (DirectedEdge) iE;
      if (currentRichClub.contains(e.getDst())) {
        edgesBetweenRichClub++;
      }
    }
  }
  return true;
}
/**
 * This function takes a class uri and returns the object properties who have this class in
 * their domain. If the second parameter is set to true, it also returns the object
 * properties inherited from parents of the given class.
 *
 * @param domainUri
 * @param inheritance
 * @return the matching property URIs
 */
public HashSet<String> getObjectPropertiesOfClass(String domainUri, boolean inheritance) {
  HashSet<String> direct = ontCache.getDirectOutObjectProperties().get(domainUri);
  // NOTE(review): without inheritance this may return null when the class has no direct
  // properties; callers appear to expect that — confirm before changing.
  if (!inheritance) {
    return direct;
  }
  HashSet<String> combined = new HashSet<String>();
  if (direct != null) {
    combined.addAll(direct);
  }
  HashSet<String> inherited = ontCache.getIndirectOutObjectProperties().get(domainUri);
  if (inherited != null) {
    combined.addAll(inherited);
  }
  return combined;
}
/**
 * Builds a map from every known source object (across all three element mappings) to its
 * copy as produced by {@code getCopy}.
 *
 * @return a new map of source object to copy
 */
public Map getObjectToCopyMap() {
  // Union of all objects known to any of the three mappings.
  HashSet sources = new HashSet(baseToBackupVariabilityElementMap.keySet());
  sources.addAll(baseToVariabilityElementMap.keySet());
  sources.addAll(keySet());
  Map copies = new HashMap();
  for (Iterator iter = sources.iterator(); iter.hasNext(); ) {
    Object source = iter.next();
    copies.put(source, getCopy((EObject) source));
  }
  return copies;
}
/**
 * Links this elevator with every transitively adjacent elevator, then designates center
 * elevators for the resulting group (including attached ceilings).
 */
public void conjoinAllNeighbors() {
  // Ceilings are conjoined through their base elevator, so they never run this themselves.
  if (!this.isCeiling()) {
    this.conjoinedelevators.clear();
    // var1 is the BFS frontier: elevators whose neighbors have not been expanded yet.
    HashSet var1 = new HashSet();
    this.conjoinedelevators.addAll(this.getNeighbors());
    var1.addAll(this.conjoinedelevators);
    var1.remove(this);
    Iterator var2;
    // Expand outward until no new elevators are discovered.
    while (!var1.isEmpty()) {
      var2 = var1.iterator();
      HashSet var3 = new HashSet();
      while (var2.hasNext()) {
        EntityElevator var4 = (EntityElevator) var2.next();
        var3.addAll(var4.getNeighbors());
      }
      var1.clear();
      // Only elevators not seen before become the next frontier.
      var3.removeAll(this.conjoinedelevators);
      this.conjoinedelevators.addAll(var3);
      var1.addAll(var3);
    }
    // Second pass: assign center/centerElevator for every member of the group.
    var2 = this.conjoinedelevators.iterator();
    while (var2.hasNext()) {
      EntityElevator var5 = (EntityElevator) var2.next();
      // NOTE(review): this branch looks suspicious — it tests !var5.center yet re-assigns
      // center = false, while the else branch (taken when var5.center OR isClient) promotes
      // var5 to its own center. Confirm the condition/assignments are not inverted.
      if (!var5.center && !this.isClient) {
        var5.center = false;
        var5.centerElevator = this;
        if (var5.ceiling != null) {
          var5.ceiling.centerElevator = this;
          var5.ceiling.conjoinedHasBeenSet = true;
          var5.ceiling.center = false;
        }
      } else {
        var5.setConjoined(this.conjoinedelevators);
        var5.centerElevator = var5;
        if (var5.ceiling != null) {
          var5.ceiling.centerElevator = var5;
          var5.ceiling.conjoinedHasBeenSet = true;
        }
        var5.center = true;
      }
    }
    this.conjoinedHasBeenSet = true;
  }
}
/**
 * Makes index snapshot hashsets
 *
 * @return result
 */
public synchronized boolean makeIndexSnapshot() {
  // Snapshot only once, and never while indexing is incomplete (dumb mode).
  if (indexSnapshotDone || DumbService.isDumb(myProject)) {
    return indexSnapshotDone;
  }
  KNOWN_SUBS.addAll(PerlSubUtil.getDeclaredSubsNames(myProject));
  KNOWN_SUBS.addAll(PerlSubUtil.getDefinedSubsNames(myProject));
  KNOWN_SUBS.addAll(PerlGlobUtil.getDefinedGlobsNames(myProject));
  KNOWN_PACKAGES.addAll(PerlPackageUtil.getDefinedPackageNames(myProject));
  indexSnapshotDone = true;
  return indexSnapshotDone;
}
public List<String> parse(Tweet t) { match = new HashSet<String>(); List<String> nerresult = ner.parse(t); List<String> stbdresult = stbd.parse(t); List<String> toporesult = tp.parse(t); match.addAll(nerresult); match.addAll(stbdresult); // match.addAll(toporesult); return ParserUtils.ResultReduce(new ArrayList<String>(match)); }
/** Asserts that both nodes agree on the value of every property either of them declares. */
private static void assertPropertyEquals(SNode expectedNode, SNode actualNode) {
  // Check the union of property names so a property present on only one side is caught too.
  HashSet<String> propertyNames = new HashSet<String>();
  propertyNames.addAll(IterableUtil.asCollection(expectedNode.getPropertyNames()));
  propertyNames.addAll(IterableUtil.asCollection(actualNode.getPropertyNames()));
  for (String name : propertyNames) {
    String expectedProperty =
        jetbrains.mps.util.SNodeOperations.getProperties(expectedNode).get(name);
    String actualProperty =
        jetbrains.mps.util.SNodeOperations.getProperties(actualNode).get(name);
    assertEquals(
        getErrorString("property", expectedNode, actualNode), expectedProperty, actualProperty);
  }
}
/**
 * Adds all states of {@code oset} into this set, switching to delegation mode first if this
 * set is still in its single-state representation.
 *
 * <p>NOTE(review): returns {@code true} even when {@code oset} is empty and nothing
 * changed, which differs from the java.util.Set#addAll "changed" contract — confirm
 * callers rely on this before altering it.
 */
public boolean addAll(Set oset) {
  if (oset.empty) {
    return true;
  }
  empty = false;
  // Both original branches performed the identical add after ensuring delegation,
  // so the delegation setup is hoisted out.
  if (!delegating) {
    beginDelegation();
  }
  return oset.delegating ? delegate.addAll(oset.delegate) : delegate.add(oset.oneState);
}
/**
 * Creates a new mutable {@link HashSet} containing the distinct elements of the given
 * iterable.
 *
 * @param iterable the source of elements; must not be null
 * @param <T> the element type
 * @return a new HashSet holding the iterable's elements
 */
public static <T> HashSet<T> newHashSet(Iterable<T> iterable) {
  HashSet<T> newHashSet = new HashSet<T>();
  // Generalized from the original Set/List special cases: ANY Collection (Deque, Queue,
  // Map views, ...) can be bulk-added, which also lets the set size itself up front
  // instead of growing element by element.
  if (iterable instanceof java.util.Collection<?>) {
    @SuppressWarnings("unchecked")
    java.util.Collection<T> collection = (java.util.Collection<T>) iterable;
    newHashSet.addAll(collection);
  } else {
    for (T element : iterable) {
      newHashSet.add(element);
    }
  }
  return newHashSet;
}
/**
 * This method takes a property URI and returns ranges of that property. If {@code recursive}
 * is true, it also returns the children of those ranges.
 *
 * @param propertyUri
 * @param recursive
 * @return the range URIs (never null)
 */
public HashSet<String> getRangesOfProperty(String propertyUri, boolean recursive) {
  HashSet<String> ranges = new HashSet<String>();
  HashSet<String> direct = ontCache.getPropertyDirectRanges().get(propertyUri);
  if (direct != null) {
    ranges.addAll(direct);
  }
  // Indirect ranges are only consulted in recursive mode.
  if (recursive) {
    HashSet<String> indirect = ontCache.getPropertyIndirectRanges().get(propertyUri);
    if (indirect != null) {
      ranges.addAll(indirect);
    }
  }
  return ranges;
}
public Cluster(JoinGraph.Node u) { // Constructor for cluster(u) this.node = u; // add to the cluster all CPTs of the given node for (CPF cpf : u.functions) cpts.add(cpf.getDomainProduct()[0]); // add all incoming messages of n for (JoinGraph.Node nb : u.getNeighbors()) { JoinGraph.Arc arc = u.arcs.get(nb); HashSet<MessageFunction> m = arc.getInMessage(u); if (!m.isEmpty()) functions.addAll(m); HashSet<BeliefNode> bn = arc.getCPTInMessage(u); if (!bn.isEmpty()) cpts.addAll(bn); } }
/**
 * Produces lexical variants of {@code original}: the canonical form plus versions with
 * Persian/Arabic honorific phrases and their abbreviations stripped, expanded further by
 * {@code probeLexicalTransformation}. The original string itself is excluded from the
 * result.
 *
 * @param original the input string
 * @return the distinct transformations (empty for an empty input)
 */
public static ArrayList<String> getLexicalTransformations(String original) {
  HashSet<String> outs = new HashSet<String>();
  if (original.isEmpty()) return new ArrayList<String>(outs);
  HashSet<String> news = new HashSet<String>();
  String canonical = Common.canonicalizeString(original);
  outs.add(canonical);
  // now other things
  // NOTE(review): the four variants below are computed but never used afterwards —
  // presumably vestigial or awaiting future use; confirm before removing.
  String parenthesisLess = Common.removeParenthesis(canonical);
  String halfSpaceLess = canonical.replace('\u200C', ' ');
  String spaceLess = canonical.replace(' ', '\u200C');
  String puncLess = Common.removePunctuations(canonical);
  outs.addAll(news); // NOTE(review): news is still empty here, so this is a no-op
  news.clear();
  // Strip honorific phrases ("peace be upon him/her" forms and their single-letter
  // parenthesized abbreviations) from every variant collected so far.
  for (String probe : outs) {
    probe = probe.replaceAll("\\bعلیهالسلام\\b", "");
    probe = probe.replaceAll("\\bعلیها السلام\\b", "");
    probe = probe.replaceAll("\\bعلیه السلام\\b", "");
    // NOTE(review): this pattern looks identical to the first one above — it may differ
    // only in invisible characters (e.g. ZWNJ); verify before de-duplicating.
    probe = probe.replaceAll("\\bعلیهالسلام\\b", "");
    probe = probe.replaceAll("\\(\\s*ع\\s*\\)", "");
    probe = probe.replaceAll("\\(\\s*س\\s*\\)", "");
    probe = probe.replaceAll("\\(\\s*ص\\s*\\)", "");
    probe = probe.replaceAll("\\bصلی الله علیه وآله\\b", "");
    probe = probe.replaceAll("\\bصلی الله علیه و آله\\b", "");
    probe = probe.replaceAll("\\bصلی الله علیه و آله و سلم\\b", "");
    probe = probe.replaceAll("\\bصلی الله علیه وآله و سلم\\b", "");
    probe = probe.replaceAll("\\bصلی الله علیه وآله وسلم\\b", "");
    // Remove parentheses left empty by the abbreviation stripping above.
    probe = probe.replace("()", "");
    probe = probe.replace("( )", "");
    // tokenized version
    news.addAll(probeLexicalTransformation(probe, outs));
  }
  outs.addAll(news);
  news.clear();
  // The input itself is not a "transformation"; drop it from the result.
  outs.remove(original);
  return new ArrayList<String>(outs);
}
// Declared in AnonymousClasses.jrag at line 52 private AnonymousDecl rewriteRule0() { { setModifiers(new Modifiers(new List().add(new Modifier("final")))); ConstructorDecl constructor = new ConstructorDecl(); addBodyDecl(constructor); constructor.setModifiers((Modifiers) constructorDecl().getModifiers().fullCopy()); String name = "Anonymous" + nextAnonymousIndex(); setID(name); constructor.setID(name); List parameterList = new List(); for (int i = 0; i < constructorDecl().getNumParameter(); i++) { parameterList.add( new ParameterDeclaration( constructorDecl().getParameter(i).type().createBoundAccess(), constructorDecl().getParameter(i).name())); } constructor.setParameterList(parameterList); List argList = new List(); for (int i = 0; i < constructor.getNumParameter(); i++) argList.add(new VarAccess(constructor.getParameter(i).name())); constructor.setConstructorInvocation( new ExprStmt(new SuperConstructorAccess("super", argList))); constructor.setBlock(new Block()); HashSet set = new HashSet(); for (int i = 0; i < getNumBodyDecl(); i++) { if (getBodyDecl(i) instanceof InstanceInitializer) { InstanceInitializer init = (InstanceInitializer) getBodyDecl(i); set.addAll(init.exceptions()); } else if (getBodyDecl(i) instanceof FieldDeclaration) { FieldDeclaration f = (FieldDeclaration) getBodyDecl(i); if (f.isInstanceVariable()) { set.addAll(f.exceptions()); } } } List exceptionList = new List(); for (Iterator iter = set.iterator(); iter.hasNext(); ) { TypeDecl exceptionType = (TypeDecl) iter.next(); if (exceptionType.isNull()) exceptionType = typeNullPointerException(); exceptionList.add(exceptionType.createQualifiedAccess()); } constructor.setExceptionList(exceptionList); return this; } }
/**
 * Collects the ids of every node reachable from the given relation, descending into member
 * ways and nested relations.
 *
 * <p>NOTE(review): the recursion does not guard against relation cycles; a
 * self-referencing relation would overflow the stack — confirm inputs are acyclic.
 *
 * @param r the relation to scan (null yields an empty set)
 * @return the reachable node ids
 */
protected Set<Long> getNodeIdsInRelation(Relation r) {
  HashSet<Long> nodeIds = new HashSet<Long>();
  if (r == null) {
    return nodeIds;
  }
  for (RelationMember member : r.getMembers()) {
    if (member.isNode()) {
      nodeIds.add(member.getMember().getId());
    } else if (member.isWay()) {
      nodeIds.addAll(getNodeIdsInWay(member.getWay()));
    } else if (member.isRelation()) {
      nodeIds.addAll(getNodeIdsInRelation(member.getRelation()));
    }
  }
  return nodeIds;
}
/**
 * Find the upwards and downwards transitive closure for each node in a graph. Assumes the graph
 * is a DAG in general, but handles cycles (which may occur in the case of meta-annotations).
 * Updates the allSubNodes and allSuperNodes fields of each node based on the downwards and
 * upwards transitive closures respectively.
 *
 * @param nodes all nodes of the graph whose closures should be computed
 */
public static void findTransitiveClosure(final Collection<? extends DAGNode> nodes) {
  // Find top nodes as initial active set
  HashSet<DAGNode> activeTopDownNodes = new HashSet<>();
  for (final DAGNode node : nodes) {
    if (node.directSuperNodes.isEmpty()) {
      activeTopDownNodes.addAll(node.directSubNodes);
    }
  }
  // Use DP-style "wavefront" to find top-down transitive closure, even if there are cycles
  while (!activeTopDownNodes.isEmpty()) {
    final HashSet<DAGNode> activeTopDownNodesNext = new HashSet<>(activeTopDownNodes.size());
    for (final DAGNode node : activeTopDownNodes) {
      // Pull in direct supers plus everything each direct super already accumulated.
      boolean changed = node.allSuperNodes.addAll(node.directSuperNodes);
      for (final DAGNode superNode : node.directSuperNodes) {
        changed |= node.allSuperNodes.addAll(superNode.allSuperNodes);
      }
      // Only propagate downward when this node's closure actually grew; monotone growth
      // over finite sets is what guarantees termination even with cycles.
      if (changed) {
        for (final DAGNode subNode : node.directSubNodes) {
          activeTopDownNodesNext.add(subNode);
        }
      }
    }
    activeTopDownNodes = activeTopDownNodesNext;
  }
  // Find bottom nodes as initial active set
  HashSet<DAGNode> activeBottomUpNodes = new HashSet<>();
  for (final DAGNode node : nodes) {
    if (node.directSubNodes.isEmpty()) {
      activeBottomUpNodes.addAll(node.directSuperNodes);
    }
  }
  // Use DP-style "wavefront" to find bottom-up transitive closure, even if there are cycles
  while (!activeBottomUpNodes.isEmpty()) {
    final HashSet<DAGNode> activeBottomUpNodesNext = new HashSet<>(activeBottomUpNodes.size());
    for (final DAGNode node : activeBottomUpNodes) {
      // Mirror of the top-down pass, accumulating sub-node closures instead.
      boolean changed = node.allSubNodes.addAll(node.directSubNodes);
      for (final DAGNode subNode : node.directSubNodes) {
        changed |= node.allSubNodes.addAll(subNode.allSubNodes);
      }
      if (changed) {
        for (final DAGNode superNode : node.directSuperNodes) {
          activeBottomUpNodesNext.add(superNode);
        }
      }
    }
    activeBottomUpNodes = activeBottomUpNodesNext;
  }
}