public Set /*<PCNode>*/ matchClass(Pattern simple_name_pattern) { Set this_class = matchSpecific(simple_name_pattern); Set this_class_names = new HashSet(); Iterator tsi = this_class.iterator(); while (tsi.hasNext()) { PCNode pc = (PCNode) tsi.next(); this_class_names.add(pc.name); } Iterator pi = parents.iterator(); while (pi.hasNext()) { PCNode parent = (PCNode) pi.next(); // System.out.println("Parent: "+parent); Set parent_class = parent.matchClass(simple_name_pattern); Iterator osi = parent_class.iterator(); while (osi.hasNext()) { PCNode pc = (PCNode) osi.next(); if (!this_class_names.contains(pc.name)) { this_class.add(pc); } } } if (abc.main.Debug.v().namePatternProcessing) System.out.println(this + ".matchClass " + simple_name_pattern.pattern() + ": " + this_class); return this_class; }
static { BAD_GLOBALS.add("db"); BAD_GLOBALS.add("local"); BAD_GLOBALS.add("core"); BAD_GLOBALS.add("args"); // TODO: should we get rid of this BAD_GLOBALS.add("obj"); // TODO: get rid of this }
public GraphIndex(Graph graph) { LOG.info("Indexing graph..."); for (String feedId : graph.getFeedIds()) { for (Agency agency : graph.getAgencies(feedId)) { Map<String, Agency> agencyForId = agenciesForFeedId.getOrDefault(feedId, new HashMap<>()); agencyForId.put(agency.getId(), agency); this.agenciesForFeedId.put(feedId, agencyForId); } } Collection<Edge> edges = graph.getEdges(); /* We will keep a separate set of all vertices in case some have the same label. * Maybe we should just guarantee unique labels. */ Set<Vertex> vertices = Sets.newHashSet(); for (Edge edge : edges) { vertices.add(edge.getFromVertex()); vertices.add(edge.getToVertex()); if (edge instanceof TablePatternEdge) { TablePatternEdge patternEdge = (TablePatternEdge) edge; TripPattern pattern = patternEdge.getPattern(); patternForId.put(pattern.code, pattern); } } for (Vertex vertex : vertices) { vertexForId.put(vertex.getLabel(), vertex); if (vertex instanceof TransitStop) { TransitStop transitStop = (TransitStop) vertex; Stop stop = transitStop.getStop(); stopForId.put(stop.getId(), stop); stopVertexForStop.put(stop, transitStop); stopsForParentStation.put(stop.getParentStation(), stop); } } for (TransitStop stopVertex : stopVertexForStop.values()) { Envelope envelope = new Envelope(stopVertex.getCoordinate()); stopSpatialIndex.insert(envelope, stopVertex); } for (TripPattern pattern : patternForId.values()) { patternsForFeedId.put(pattern.getFeedId(), pattern); patternsForRoute.put(pattern.route, pattern); for (Trip trip : pattern.getTrips()) { patternForTrip.put(trip, pattern); tripForId.put(trip.getId(), trip); } for (Stop stop : pattern.getStops()) { patternsForStop.put(stop, pattern); } } for (Route route : patternsForRoute.asMap().keySet()) { routeForId.put(route.getId(), route); } // Copy these two service indexes from the graph until we have better ones. calendarService = graph.getCalendarService(); serviceCodes = graph.serviceCodes; this.graph = graph; LOG.info("Done indexing graph."); }
public static void main(String[] args) { Set<User> users = new TreeSet<>( new Comparator() { @Override public int compare(Object o1, Object o2) { if (!(o1 instanceof User) || !(o2 instanceof User)) { throw new ClassCastException(); } User thisUser = (User) o1; User thatUser = (User) o2; return thisUser.getName().compareTo(thatUser.getName()); } }); User john = new User("John"); users.add(john); users.add(new User("Peter")); users.add(new User("Mike")); users.add(new User("123John")); for (User user : users) { System.out.println(user); } }
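// A type-safe alternative to the raw Comparator above (a minimal sketch, assuming the same
// User class with a String-returning getName() accessor): Comparator.comparing removes the
// manual instanceof checks and casts entirely.
import java.util.Comparator;
import java.util.Set;
import java.util.TreeSet;

public class UserSortExample {
  public static void main(String[] args) {
    // TreeSet orders its elements with the supplied comparator; equal names collapse to one entry.
    Set<User> users = new TreeSet<>(Comparator.comparing(User::getName));
    users.add(new User("John"));
    users.add(new User("Peter"));
    users.add(new User("Mike"));
    users.add(new User("123John"));
    users.forEach(System.out::println);
  }
}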
@Override public DfaInstructionState[] visitPush( PushInstruction instruction, DataFlowRunner runner, DfaMemoryState memState) { if (myContext == instruction.getPlace()) { final Map<DfaVariableValue, DfaVariableState> map = ((ValuableDataFlowRunner.MyDfaMemoryState) memState).getVariableStates(); for (Map.Entry<DfaVariableValue, DfaVariableState> entry : map.entrySet()) { ValuableDataFlowRunner.ValuableDfaVariableState state = (ValuableDataFlowRunner.ValuableDfaVariableState) entry.getValue(); DfaVariableValue variableValue = entry.getKey(); final PsiExpression psiExpression = state.myExpression; if (psiExpression != null && variableValue.getQualifier() == null) { myValues.put(variableValue.getPsiVariable(), psiExpression); } } DfaValue value = instruction.getValue(); if (value instanceof DfaVariableValue && ((DfaVariableValue) value).getQualifier() == null) { if (memState.isNotNull((DfaVariableValue) value)) { myNotNulls.add(((DfaVariableValue) value).getPsiVariable()); } if (memState.isNull(value)) { myNulls.add(((DfaVariableValue) value).getPsiVariable()); } } } return super.visitPush(instruction, runner, memState); }
private static void assertDeepChildrenEquals(SNode expectedNode, SNode actualNode) { Set<String> roles = new HashSet<String>(); for (SNode child : expectedNode.getChildren()) { roles.add(child.getRoleInParent()); } for (SNode child : actualNode.getChildren()) { roles.add(child.getRoleInParent()); } for (String role : roles) { Iterable<? extends SNode> expectedChildren = expectedNode.getChildren(role); Iterable<? extends SNode> actualChildren = actualNode.getChildren(role); int esize = IterableUtil.asCollection(expectedChildren).size(); int asize = IterableUtil.asCollection(actualChildren).size(); assertEquals( getErrorString("child count in role " + role, expectedNode, actualNode), esize, asize); Iterator<? extends SNode> actualIterator = actualChildren.iterator(); for (SNode expectedChild : expectedChildren) { SNode actualChild = actualIterator.next(); assertEquals( getErrorString("children in role " + role, expectedNode, actualNode), expectedChild.getNodeId(), actualChild.getNodeId()); assertDeepNodeEquals(expectedChild, actualChild); } } }
@Override public void consume(Object o) { if (!(o instanceof GroovyResolveResult)) { LOG.error(o); return; } GroovyResolveResult result = (GroovyResolveResult) o; if (!result.isStaticsOK()) { if (myInapplicable == null) myInapplicable = ContainerUtil.newArrayList(); myInapplicable.add(result); return; } if (!result.isAccessible() && myParameters.getInvocationCount() < 2) return; if (mySkipPackages && result.getElement() instanceof PsiPackage) return; PsiElement element = result.getElement(); if (element instanceof PsiVariable && !myMatcher.prefixMatches(((PsiVariable) element).getName())) { return; } if (element instanceof GrReflectedMethod) { element = ((GrReflectedMethod) element).getBaseMethod(); if (!myProcessedMethodWithOptionalParams.add((GrMethod) element)) return; result = new GroovyResolveResultImpl( element, result.getCurrentFileResolveContext(), result.getSpreadState(), result.getSubstitutor(), result.isAccessible(), result.isStaticsOK(), result.isInvokedOnProperty(), result.isValidResult()); } if (myFieldPointerOperator && !(element instanceof PsiVariable)) { return; } if (myMethodPointerOperator && !(element instanceof PsiMethod)) { return; } addCandidate(result); if (!myFieldPointerOperator && !myMethodPointerOperator) { if (element instanceof PsiMethod) { processProperty((PsiMethod) element, result); } else if (element instanceof GrField) { if (((GrField) element).isProperty()) { processPropertyFromField((GrField) element, result); } } } if (element instanceof GrVariable && !(element instanceof GrField)) { myLocalVars.add(((GrVariable) element).getName()); } }
public void testGetPhysicalPosition() throws IOException { final int records = 10000; long seed = System.currentTimeMillis(); MersenneTwisterFast mersenneTwisterFast = new MersenneTwisterFast(seed); System.out.println("testGetPhysicalPosition seed : " + seed); Set<OPhysicalPosition> positions = new HashSet<OPhysicalPosition>(); ORecordVersion recordVersion = OVersionFactory.instance().createVersion(); recordVersion.increment(); recordVersion.increment(); for (int i = 0; i < records; i++) { int recordSize = mersenneTwisterFast.nextInt(2 * OClusterPage.MAX_RECORD_SIZE) + 1; byte[] record = new byte[recordSize]; mersenneTwisterFast.nextBytes(record); recordVersion.increment(); final OPhysicalPosition physicalPosition = paginatedCluster.createRecord(record, recordVersion, (byte) i); positions.add(physicalPosition); } Set<OPhysicalPosition> removedPositions = new HashSet<OPhysicalPosition>(); for (OPhysicalPosition position : positions) { OPhysicalPosition physicalPosition = new OPhysicalPosition(); physicalPosition.clusterPosition = position.clusterPosition; physicalPosition = paginatedCluster.getPhysicalPosition(physicalPosition); Assert.assertEquals(physicalPosition.clusterPosition, position.clusterPosition); Assert.assertEquals(physicalPosition.recordType, position.recordType); Assert.assertEquals(physicalPosition.recordSize, position.recordSize); Assert.assertEquals(physicalPosition.dataSegmentPos, position.dataSegmentPos); Assert.assertEquals(physicalPosition.dataSegmentId, position.dataSegmentId); if (mersenneTwisterFast.nextBoolean()) { paginatedCluster.deleteRecord(position.clusterPosition); removedPositions.add(position); } } for (OPhysicalPosition position : positions) { OPhysicalPosition physicalPosition = new OPhysicalPosition(); physicalPosition.clusterPosition = position.clusterPosition; physicalPosition = paginatedCluster.getPhysicalPosition(physicalPosition); if (removedPositions.contains(position)) Assert.assertNull(physicalPosition); else { Assert.assertEquals(physicalPosition.clusterPosition, position.clusterPosition); Assert.assertEquals(physicalPosition.recordType, position.recordType); Assert.assertEquals(physicalPosition.recordSize, position.recordSize); Assert.assertEquals(physicalPosition.dataSegmentPos, position.dataSegmentPos); Assert.assertEquals(physicalPosition.dataSegmentId, position.dataSegmentId); } } }
private static Map<String, PsiType> getCompatibleTypeNames( @NotNull PsiType type, @Nullable PsiType min, PsiManager manager, GlobalSearchScope scope) { if (type instanceof PsiDisjunctionType) type = ((PsiDisjunctionType) type).getLeastUpperBound(); // if initial type is not assignable to min type we don't take into consideration min type. if (min != null && !TypesUtil.isAssignable(min, type, manager, scope)) { min = null; } Map<String, PsiType> map = new LinkedHashMap<String, PsiType>(); final PsiPrimitiveType unboxed = PsiPrimitiveType.getUnboxedType(type); if (unboxed != null) type = unboxed; final Set<PsiType> set = new LinkedHashSet<PsiType>(); set.add(type); while (!set.isEmpty()) { PsiType cur = set.iterator().next(); set.remove(cur); if (!map.containsValue(cur) && (min == null || TypesUtil.isAssignable(min, cur, manager, scope))) { if (isPartiallySubstituted(cur)) { LOG.assertTrue(cur instanceof PsiClassType); PsiClassType rawType = ((PsiClassType) cur).rawType(); map.put(rawType.getPresentableText(), rawType); } else { map.put(cur.getPresentableText(), cur); } for (PsiType superType : cur.getSuperTypes()) { if (!map.containsValue(superType)) { set.add(superType); } } } } return map; }
@Override public void saveConceptRelation(int catId, int conceptId) { byte[] key = (prefix + "id2aids").getBytes(ENCODING); byte[] hkey = NumberUtils.int2Bytes(catId); byte[] value = jedis.hget(key, hkey); Set<Integer> ids = NumberUtils.bytes2IntSet(value); if (!ids.contains(conceptId)) { ids.add(conceptId); jedis.hset(key, hkey, NumberUtils.intSet2Bytes(ids)); } // record how many concepts this category contains incConceptCount(catId, 1); // record the categories that this concept belongs to key = (prefix + "ctp:catids").getBytes(ENCODING); hkey = NumberUtils.int2Bytes(conceptId); value = jedis.hget(key, hkey); ids = NumberUtils.bytes2IntSet(value); if (!ids.contains(catId)) { ids.add(catId); jedis.hset(key, hkey, NumberUtils.intSet2Bytes(ids)); } }
private void buildFullyConnectedGraph(NodeLists listPairs) { for (int i = 0; i < listPairs.getList1().size(); i++) { // if the node has not been created yet if (!graph.containsVertex(listPairs.getList1().get(i))) { graph.addVertex(listPairs.getList1().get(i)); } if (!parents.contains(listPairs.getList1().get(i))) { parents.add(listPairs.getList1().get(i)); } // if the node has not been created yet for (int j = 0; j < listPairs.getList2().size(); j++) { if (!graph.containsVertex(listPairs.getList2().get(j))) { graph.addVertex(listPairs.getList2().get(j)); } if (!children.contains(listPairs.getList2().get(j))) { children.add(listPairs.getList2().get(j)); } // create an edge graph.addEdge(listPairs.getList1().get(i), listPairs.getList2().get(j)); System.out.println(listPairs.getList1().get(i) + " -> " + listPairs.getList2().get(j)); } } }
/** * Compute the set of all IA32 opcodes that have emit methods in the Assembler. This method uses * the stylized form of all emit method names in the Assembler to extract the opcode of each one. * It returns a set of all such distinct names, as a set of Strings. * * @param emitters the set of all emit methods in the Assembler * @return the set of all opcodes handled by the Assembler */ private static Set<String> getOpcodes(Method[] emitters) { Set<String> s = new HashSet<String>(); for (int i = 0; i < emitters.length; i++) { String name = emitters[i].getName(); if (DEBUG) System.err.println(name); if (name.startsWith("emit")) { int posOf_ = name.indexOf('_'); if (posOf_ != -1) { String opcode = name.substring(4, posOf_); if (!excludedOpcodes.contains(opcode)) { s.add(opcode); } } else { String opcode = name.substring(4); // make sure it is an opcode if (opcode.equals(opcode.toUpperCase(Locale.getDefault()))) { if (!excludedOpcodes.contains(opcode)) { s.add(opcode); } } } } } return s; }
/** * register a POIFSReaderListener for a particular document * * @param listener the listener * @param path the path of the document of interest * @param documentName the name of the document of interest */ void registerListener( final POIFSReaderListener listener, final POIFSDocumentPath path, final String documentName) { if (!omnivorousListeners.contains(listener)) { // not an omnivorous listener (if it was, this method is a // no-op) Set<DocumentDescriptor> descriptors = selectiveListeners.get(listener); if (descriptors == null) { // this listener has not registered before descriptors = new HashSet<DocumentDescriptor>(); selectiveListeners.put(listener, descriptors); } DocumentDescriptor descriptor = new DocumentDescriptor(path, documentName); if (descriptors.add(descriptor)) { // this listener wasn't already listening for this // document -- add the listener to the set of // listeners for this document Set<POIFSReaderListener> listeners = chosenDocumentDescriptors.get(descriptor); if (listeners == null) { // nobody was listening for this document before listeners = new HashSet<POIFSReaderListener>(); chosenDocumentDescriptors.put(descriptor, listeners); } listeners.add(listener); } } }
private void onFinishWebUITests(ITestContext testContext) { // List of test results which we will delete later because of duplication or because the test // eventually passed List<ITestResult> testsToBeRemoved = new ArrayList<ITestResult>(); // collect all id's from passed test Set<Integer> passedTestIds = new HashSet<Integer>(); for (ITestResult passedTest : testContext.getPassedTests().getAllResults()) { passedTestIds.add(getTestId(passedTest)); } Set<Integer> failedTestIds = new HashSet<Integer>(); for (ITestResult failedTest : testContext.getFailedTests().getAllResults()) { int failedTestId = getTestId(failedTest); // if this test failed before mark as to be deleted // or delete this failed test if there is at least one passed version if (failedTestIds.contains(failedTestId) || passedTestIds.contains(failedTestId)) { testsToBeRemoved.add(failedTest); } else { failedTestIds.add(failedTestId); } } // finally delete all tests that are marked for (Iterator<ITestResult> iterator = testContext.getFailedTests().getAllResults().iterator(); iterator.hasNext(); ) { ITestResult testResult = iterator.next(); if (testsToBeRemoved.contains(testResult)) { iterator.remove(); } } }
@Override public void visit( final int version, final int access, final String name, final String signature, final String superName, final String[] interfaces) { Set<String> interfacesSet = new LinkedHashSet<String>(); if (interfaces != null) Collections.addAll(interfacesSet, interfaces); for (Class extraInterface : classList) { if (extraInterface.isInterface()) interfacesSet.add(BytecodeHelper.getClassInternalName(extraInterface)); } final boolean addGroovyObjectSupport = !GroovyObject.class.isAssignableFrom(superClass); if (addGroovyObjectSupport) interfacesSet.add("groovy/lang/GroovyObject"); super.visit( V1_5, ACC_PUBLIC, proxyName, signature, BytecodeHelper.getClassInternalName(superClass), interfacesSet.toArray(new String[interfacesSet.size()])); addDelegateFields(); if (addGroovyObjectSupport) { createGroovyObjectSupport(); } for (Class clazz : classList) { visitClass(clazz); } }
private boolean existsUnblockedSemiDirectedPath(Node from, Node to, List<Node> cond, Graph G) { Queue<Node> Q = new LinkedList<Node>(); Set<Node> V = new HashSet<Node>(); Q.offer(from); V.add(from); while (!Q.isEmpty()) { Node t = Q.remove(); if (t == to) return true; for (Node u : G.getAdjacentNodes(t)) { Edge edge = G.getEdge(t, u); Node c = Edges.traverseSemiDirected(t, edge); if (c == null) continue; if (cond.contains(c)) continue; if (c == to) return true; if (!V.contains(c)) { V.add(c); Q.offer(c); } } } return false; }
private static void collectIncludedArtifacts( Artifact artifact, final PackagingElementResolvingContext context, final Set<Artifact> processed, final Set<Artifact> result, final boolean withOutputPathOnly) { if (!processed.add(artifact)) { return; } if (!withOutputPathOnly || !StringUtil.isEmpty(artifact.getOutputPath())) { result.add(artifact); } ArtifactUtil.processPackagingElements( artifact, ArtifactElementType.ARTIFACT_ELEMENT_TYPE, new Processor<ArtifactPackagingElement>() { @Override public boolean process(ArtifactPackagingElement element) { Artifact included = element.findArtifact(context); if (included != null) { collectIncludedArtifacts(included, context, processed, result, withOutputPathOnly); } return true; } }, context, false); }
protected void addLookupItem( Set<LookupElement> set, TailType tailType, @NotNull Object completion, final PsiFile file, final CompletionVariant variant) { LookupElement ret = objectToLookupItem(completion); if (ret == null) return; if (!(ret instanceof LookupItem)) { set.add(ret); return; } LookupItem item = (LookupItem) ret; final InsertHandler insertHandler = variant.getInsertHandler(); if (insertHandler != null && item.getInsertHandler() == null) { item.setInsertHandler(insertHandler); item.setTailType(TailType.UNKNOWN); } else if (tailType != TailType.NONE) { item.setTailType(tailType); } final Map<Object, Object> itemProperties = variant.getItemProperties(); for (final Object key : itemProperties.keySet()) { item.setAttribute(key, itemProperties.get(key)); } set.add(ret); }
private void processFromProperties(TypeElement type, Set<TypeElement> types) { List<? extends Element> children = type.getEnclosedElements(); VisitorConfig config = conf.getConfig(type, children); // fields if (config.visitFieldProperties()) { for (VariableElement field : ElementFilter.fieldsIn(children)) { TypeElement typeElement = typeExtractor.visit(field.asType()); if (typeElement != null) { types.add(typeElement); } } } // getters if (config.visitMethodProperties()) { for (ExecutableElement method : ElementFilter.methodsIn(children)) { String name = method.getSimpleName().toString(); if ((name.startsWith("get") || name.startsWith("is")) && method.getParameters().isEmpty()) { TypeElement typeElement = typeExtractor.visit(method.getReturnType()); if (typeElement != null) { types.add(typeElement); } } } } }
/** * Gets closure of all the referenced documents from the primary document(typically the service * WSDL). It traverses the WSDL and schema imports and builds a closure set of documents. * * @param systemId primary wsdl or the any root document * @param resolver used to get SDDocumentImpl for a document * @param onlyTopLevelSchemas if true, the imported schemas from a schema would be ignored * @return all the documents */ public static Map<String, SDDocument> getMetadataClosure( @NotNull String systemId, @NotNull SDDocumentResolver resolver, boolean onlyTopLevelSchemas) { Map<String, SDDocument> closureDocs = new HashMap<String, SDDocument>(); Set<String> remaining = new HashSet<String>(); remaining.add(systemId); while (!remaining.isEmpty()) { Iterator<String> it = remaining.iterator(); String current = it.next(); remaining.remove(current); SDDocument currentDoc = resolver.resolve(current); SDDocument old = closureDocs.put(currentDoc.getURL().toExternalForm(), currentDoc); assert old == null; Set<String> imports = currentDoc.getImports(); if (!currentDoc.isSchema() || !onlyTopLevelSchemas) { for (String importedDoc : imports) { if (closureDocs.get(importedDoc) == null) { remaining.add(importedDoc); } } } } return closureDocs; }
static { getAliases.add("get"); getAliases.add("list"); getAliases.add("show"); getAliases.add("display"); }
public Pair<Set<Instruction>, Set<Instruction>> getConstConditionalExpressions() { Set<Instruction> trueSet = new HashSet<Instruction>(); Set<Instruction> falseSet = new HashSet<Instruction>(); for (Instruction instruction : myInstructions) { if (instruction instanceof BranchingInstruction) { BranchingInstruction branchingInstruction = (BranchingInstruction) instruction; if (branchingInstruction.getPsiAnchor() != null && branchingInstruction.isConditionConst()) { if (!branchingInstruction.isTrueReachable()) { falseSet.add(branchingInstruction); } if (!branchingInstruction.isFalseReachable()) { trueSet.add(branchingInstruction); } } } } for (Instruction instruction : myInstructions) { if (instruction instanceof BranchingInstruction) { BranchingInstruction branchingInstruction = (BranchingInstruction) instruction; if (branchingInstruction.isTrueReachable()) { falseSet.remove(branchingInstruction); } if (branchingInstruction.isFalseReachable()) { trueSet.remove(branchingInstruction); } } } return Pair.create(trueSet, falseSet); }
static { Set<EncodingRule> rules = new HashSet<EncodingRule>(); rules.add(new EncodingRule("*", "%2A")); rules.add(new EncodingRule("+", "%20")); rules.add(new EncodingRule("%7E", "~")); ENCODING_RULES = Collections.unmodifiableSet(rules); }
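// A minimal sketch of how post-processing rules like ENCODING_RULES above are commonly applied
// on top of URLEncoder.encode output. The applyRules helper and the getOriginal()/getReplacement()
// accessors on EncodingRule are assumptions for illustration, not the class's confirmed API.
static String applyRules(String encoded, Set<EncodingRule> rules) {
  for (EncodingRule rule : rules) {
    // replace every literal occurrence of the rule's source token with its replacement
    encoded = encoded.replace(rule.getOriginal(), rule.getReplacement());
  }
  return encoded;
}
// e.g. applyRules(URLEncoder.encode("a b*c~", "UTF-8"), ENCODING_RULES)
// turns the default "a+b*c%7E" into "a%20b%2Ac~", the typical RFC 3986-style query encoding.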
@Test public void testCheck() { Random rnd = random(); Set<Object> jdk = Collections.newSetFromMap(new IdentityHashMap<Object, Boolean>()); RamUsageEstimator.IdentityHashSet<Object> us = new RamUsageEstimator.IdentityHashSet<Object>(); int max = 100000; int threshold = 256; for (int i = 0; i < max; i++) { // some of these will be interned and some will not so there will be collisions. Integer v = rnd.nextInt(threshold); boolean e1 = jdk.contains(v); boolean e2 = us.contains(v); Assert.assertEquals(e1, e2); e1 = jdk.add(v); e2 = us.add(v); Assert.assertEquals(e1, e2); } Set<Object> collected = Collections.newSetFromMap(new IdentityHashMap<Object, Boolean>()); for (Object o : us) { collected.add(o); } Assert.assertEquals(collected, jdk); }
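// Why the test above sees some identity collisions (an illustrative fragment, not part of the
// test): autoboxing goes through Integer.valueOf, which caches values in -128..127, so repeated
// boxings of a small value return the same object while larger values generally produce distinct
// objects.
static void integerCacheDemo() {
  Integer a = 100, b = 100;   // both taken from the Integer cache
  Integer c = 200, d = 200;   // outside the cached range: two distinct objects
  System.out.println(a == b); // true  -> an identity-based set treats them as one element
  System.out.println(c == d); // false -> an identity-based set keeps both
}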
private RDFRectangle toRectangle(StatementPattern pattern, BindingSet bindings) { Value sVal = pattern.getSubjectVar().getValue(); Value pVal = pattern.getPredicateVar().getValue(); Value oVal = pattern.getObjectVar().getValue(); RDFURIRange subjectRange; List<String> list = new ArrayList<String>(); if (sVal == null) { if (bindings.hasBinding(pattern.getSubjectVar().getName())) list.add(bindings.getValue(pattern.getSubjectVar().getName()).stringValue()); } else list.add(sVal.stringValue()); if (!list.isEmpty()) subjectRange = new RDFURIRange(list); else subjectRange = new RDFURIRange(); ExplicitSetRange<URI> predicateRange; Set<URI> set = new HashSet<URI>(); if (pVal == null) { if (bindings.hasBinding(pattern.getPredicateVar().getName())) set.add((URI) bindings.getValue(pattern.getPredicateVar().getName())); } else set.add((URI) pVal); if (!set.isEmpty()) predicateRange = new ExplicitSetRange<URI>(set); else predicateRange = new ExplicitSetRange<>(); RDFValueRange objectRange = new RDFValueRange(); if (oVal == null) { if (bindings.hasBinding(pattern.getObjectVar().getName())) objectRange = fillObjectRange(bindings.getValue(pattern.getObjectVar().getName())); } else objectRange = fillObjectRange(oVal); return new RDFRectangle(subjectRange, predicateRange, objectRange); }
protected boolean preprocessUsages(Ref<UsageInfo[]> refUsages) { final MultiMap<PsiElement, String> conflicts = new MultiMap<PsiElement, String>(); checkExistingMethods(conflicts, true); checkExistingMethods(conflicts, false); final Collection<PsiClass> classes = ClassInheritorsSearch.search(myClass).findAll(); for (FieldDescriptor fieldDescriptor : myFieldDescriptors) { final Set<PsiMethod> setters = new HashSet<PsiMethod>(); final Set<PsiMethod> getters = new HashSet<PsiMethod>(); for (PsiClass aClass : classes) { final PsiMethod getterOverrider = myDescriptor.isToEncapsulateGet() ? aClass.findMethodBySignature(fieldDescriptor.getGetterPrototype(), false) : null; if (getterOverrider != null) { getters.add(getterOverrider); } final PsiMethod setterOverrider = myDescriptor.isToEncapsulateSet() ? aClass.findMethodBySignature(fieldDescriptor.getSetterPrototype(), false) : null; if (setterOverrider != null) { setters.add(setterOverrider); } } if (!getters.isEmpty() || !setters.isEmpty()) { final PsiField field = fieldDescriptor.getField(); for (PsiReference reference : ReferencesSearch.search(field)) { final PsiElement place = reference.getElement(); if (place instanceof PsiReferenceExpression) { final PsiExpression qualifierExpression = ((PsiReferenceExpression) place).getQualifierExpression(); final PsiClass ancestor; if (qualifierExpression == null) { ancestor = PsiTreeUtil.getParentOfType(place, PsiClass.class, false); } else { ancestor = PsiUtil.resolveClassInType(qualifierExpression.getType()); } final boolean isGetter = !PsiUtil.isAccessedForWriting((PsiExpression) place); for (PsiMethod overridden : isGetter ? getters : setters) { if (InheritanceUtil.isInheritorOrSelf(myClass, ancestor, true)) { conflicts.putValue( overridden, "There is already a " + RefactoringUIUtil.getDescription(overridden, true) + " which would hide generated " + (isGetter ? "getter" : "setter") + " for " + place.getText()); break; } } } } } } return showConflicts(conflicts, refUsages.get()); }
@NotNull private Set<VcsRef> readBranches(@NotNull GitRepository repository) { StopWatch sw = StopWatch.start("readBranches in " + repository.getRoot().getName()); VirtualFile root = repository.getRoot(); repository.update(); Collection<GitLocalBranch> localBranches = repository.getBranches().getLocalBranches(); Collection<GitRemoteBranch> remoteBranches = repository.getBranches().getRemoteBranches(); Set<VcsRef> refs = new THashSet<VcsRef>(localBranches.size() + remoteBranches.size()); for (GitLocalBranch localBranch : localBranches) { refs.add( myVcsObjectsFactory.createRef( localBranch.getHash(), localBranch.getName(), GitRefManager.LOCAL_BRANCH, root)); } for (GitRemoteBranch remoteBranch : remoteBranches) { refs.add( myVcsObjectsFactory.createRef( remoteBranch.getHash(), remoteBranch.getNameForLocalOperations(), GitRefManager.REMOTE_BRANCH, root)); } String currentRevision = repository.getCurrentRevision(); if (currentRevision != null) { // null => fresh repository refs.add( myVcsObjectsFactory.createRef( HashImpl.build(currentRevision), "HEAD", GitRefManager.HEAD, root)); } sw.report(); return refs; }
private static Set<String> createDelegateMethodList(Class superClass, Class[] interfaces) { Set<String> selectedMethods = new HashSet<String>(); List<Method> interfaceMethods = new ArrayList<Method>(); if (interfaces != null) { for (Class thisInterface : interfaces) { getInheritedMethods(thisInterface, interfaceMethods); } for (Method method : interfaceMethods) { if (!containsEquivalentMethod(OBJECT_METHODS, method) && !containsEquivalentMethod(GROOVYOBJECT_METHODS, method)) { selectedMethods.add(method.getName()); } } } List<Method> additionalMethods = getInheritedMethods(superClass, new ArrayList<Method>()); for (Method method : additionalMethods) { if (method.getName().indexOf('$') != -1) continue; if (!containsEquivalentMethod(interfaceMethods, method) && !containsEquivalentMethod(OBJECT_METHODS, method) && !containsEquivalentMethod(GROOVYOBJECT_METHODS, method)) { selectedMethods.add(method.getName()); } } return selectedMethods; }
/** * Returns actions that may be taken when {@link ServiceErrorID#TRANSPORT_ERROR} occurs * * @return <code>ErrorHandlingAction</code>s caused because of transport(JMS) */ protected Set<ErrorHandlingAction> getActionsForTransportError() { Set<ErrorHandlingAction> actions = new LinkedHashSet<>(); actions.add(ErrorHandlingActionFactory.createErrorHandlingAction(ErrorHandlingAction.LOG)); actions.add( ErrorHandlingActionFactory.createErrorHandlingAction(ErrorHandlingAction.STOP_SERVICE)); return actions; }
public static void viewTagInfo(String inurl, String tag) throws Exception { // pr = new PrintStream(new FileOutputStream("/semplest/lluis/keywordExp/urldata.txt")); pr = System.out; long start = System.currentTimeMillis(); pr.println(inurl + "****************************************************************"); printList(cleanUrlText(TextUtils.HTMLText(inurl, tag))); String urls = TextUtils.HTMLLinkString(inurl, inurl); String[] url = urls.split("\\s+"); Set<String> urlMap = new HashSet<String>(url.length); urlMap.add(inurl); for (String ur : url) { if (!urlMap.contains(ur)) { pr.println(ur + "***************************************************************"); try { printList(cleanUrlText(TextUtils.HTMLText(ur, tag))); } catch (Exception e) { System.out.println("Error with url :" + ur); e.printStackTrace(); logger.error("Problem", e); } urlMap.add(ur); } } pr.println("Time elapsed " + (System.currentTimeMillis() - start)); }