/**
 * Assigns sequential identifiers to the provided <code>clusters</code> (and their sub-clusters).
 * If a cluster already has an identifier, the identifier will not be changed.
 *
 * @param clusters Clusters to assign identifiers to.
 * @throws IllegalArgumentException if the provided clusters contain non-unique identifiers
 */
public static void assignClusterIds(Collection<Cluster> clusters) {
    final ArrayList<Cluster> flattened = Lists.newArrayListWithExpectedSize(clusters.size());
    flatten(flattened, clusters);

    synchronized (clusters) {
        final HashSet<Integer> ids = Sets.newHashSet();

        // First, find the start value for the id and check uniqueness of the ids
        // already provided.
        int maxId = Integer.MIN_VALUE;
        for (final Cluster cluster : flattened) {
            if (cluster.id != null) {
                if (!ids.add(cluster.id)) {
                    throw new IllegalArgumentException("Non-unique cluster id found: " + cluster.id);
                }
                maxId = Math.max(maxId, cluster.id);
            }
        }

        // We'd rather start with 0
        maxId = Math.max(maxId, -1);

        // Assign missing ids
        for (final Cluster c : flattened) {
            if (c.id == null) {
                c.id = ++maxId;
            }
        }
    }
}
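A minimal usage sketch for assignClusterIds. The Cluster construction and the addSubcluster helper are hypothetical (the snippet above only shows that Cluster exposes a nullable Integer id and that flatten() walks sub-clusters).

// Hypothetical usage: existing ids are preserved, missing ids continue after the current maximum.
static void assignClusterIdsExample() {
    Cluster child = new Cluster();                        // hypothetical no-arg constructor
    child.id = 5;                                         // pre-assigned id stays untouched
    Cluster parent = new Cluster();                       // id == null, gets a generated id
    parent.addSubcluster(child);                          // hypothetical helper; flatten() visits sub-clusters
    assignClusterIds(Collections.singletonList(parent));
    // parent.id == 6 (one past the largest existing id), child.id == 5
}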
/**
 * For unique local packages. Returns the cached LocalPkgInfo for the requested type. Loads it
 * from disk if not cached.
 *
 * @param filter {@link PkgType#PKG_TOOLS}, {@link PkgType#PKG_PLATFORM_TOOLS}, {@link
 *     PkgType#PKG_DOC} or {@link PkgType#PKG_NDK}.
 * @return null if the package is not installed.
 */
@Nullable
public LocalPkgInfo getPkgInfo(@NonNull PkgType filter) {
    if (filter != PkgType.PKG_TOOLS
            && filter != PkgType.PKG_PLATFORM_TOOLS
            && filter != PkgType.PKG_DOC
            && filter != PkgType.PKG_NDK) {
        assert false;
        return null;
    }

    LocalPkgInfo info = null;

    synchronized (mLocalPackages) {
        Collection<LocalPkgInfo> existing = mLocalPackages.get(filter);
        assert existing.size() <= 1;
        if (!existing.isEmpty()) {
            return existing.iterator().next();
        }

        File uniqueDir = new File(mSdkRoot, filter.getFolderName());

        if (!mVisitedDirs.containsEntry(filter, new LocalDirInfo.MapComparator(uniqueDir))) {
            switch (filter) {
                case PKG_TOOLS:
                    info = scanTools(uniqueDir);
                    break;
                case PKG_PLATFORM_TOOLS:
                    info = scanPlatformTools(uniqueDir);
                    break;
                case PKG_DOC:
                    info = scanDoc(uniqueDir);
                    break;
                case PKG_NDK:
                    info = scanNdk(uniqueDir);
                    break;
                default:
                    break;
            }
        }

        // Whether we have found a valid pkg or not, this directory has been visited.
        mVisitedDirs.put(filter, new LocalDirInfo(mFileOp, uniqueDir));

        if (info != null) {
            mLocalPackages.put(filter, info);
        }
    }

    return info;
}
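A hedged usage sketch, assuming getPkgInfo lives on a LocalSdk-style class created from an SDK root directory; the constructor shown is an assumption, not taken from the snippet.

LocalSdk sdk = new LocalSdk(new File("/opt/android-sdk"));   // assumed constructor
LocalPkgInfo tools = sdk.getPkgInfo(PkgType.PKG_TOOLS);      // scans <sdk>/tools once, then serves the cache
if (tools == null) {
    // the tools package is not installed (or the requested type was not one of the accepted filters)
}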
/**
 * Collects saved Reflections resources from all URLs that contain the given packagePrefix and
 * match the given resourceNameFilter, and de-serializes them using the default serializer
 * {@link XmlSerializer} or the optionally supplied serializer.
 *
 * <p>It is preferred to use a designated resource prefix (for example META-INF/reflections but
 * not just META-INF), so that relevant URLs can be found much faster.
 *
 * @param packagePrefix common package prefix of the saved resources
 * @param resourceNameFilter filter applied to resource names
 * @param optionalSerializer optionally supply one serializer instance; if not specified or
 *     null, {@link XmlSerializer} will be used
 */
public static Reflections collect(
        final String packagePrefix,
        final Predicate<String> resourceNameFilter,
        @Nullable Serializer... optionalSerializer) {
    Serializer serializer =
            optionalSerializer != null && optionalSerializer.length == 1
                    ? optionalSerializer[0]
                    : new XmlSerializer();

    Collection<URL> urls = ClasspathHelper.forPackage(packagePrefix);
    if (urls.isEmpty()) return null;

    long start = System.currentTimeMillis();
    final Reflections reflections = new Reflections();
    Iterable<Vfs.File> files = Vfs.findFiles(urls, packagePrefix, resourceNameFilter);
    for (final Vfs.File file : files) {
        InputStream inputStream = null;
        try {
            inputStream = file.openInputStream();
            reflections.merge(serializer.read(inputStream));
        } catch (IOException e) {
            throw new ReflectionsException("could not merge " + file, e);
        } finally {
            close(inputStream);
        }
    }

    if (log != null) {
        Store store = reflections.getStore();
        int keys = 0;
        int values = 0;
        for (String index : store.keySet()) {
            keys += store.get(index).keySet().size();
            values += store.get(index).size();
        }

        log.info(
                format(
                        "Reflections took %d ms to collect %d url%s, producing %d keys and %d values [%s]",
                        System.currentTimeMillis() - start,
                        urls.size(),
                        urls.size() > 1 ? "s" : "",
                        keys,
                        values,
                        Joiner.on(", ").join(urls)));
    }
    return reflections;
}
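A usage sketch for the overload above. The resource prefix and the FilterBuilder-based name filter are illustrative assumptions about a typical call, not something mandated by the method itself.

// Merge pre-scanned metadata saved under META-INF/reflections (files named *-reflections.xml).
Reflections merged =
        Reflections.collect(
                "META-INF/reflections",
                new FilterBuilder().include(".*-reflections\\.xml"));
if (merged != null) {
    // merged holds the union of all collected stores; null means no matching URLs were found
}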
/**
 * Add new relocating ranges (tokens moving from their respective endpoints, to another).
 *
 * @param tokens tokens being moved
 * @param endpoint destination of moves
 */
public void addRelocatingTokens(Collection<Token> tokens, InetAddress endpoint) {
    assert endpoint != null;
    assert tokens != null && tokens.size() > 0;

    lock.writeLock().lock();

    try {
        for (Token token : tokens) {
            InetAddress prev = relocatingTokens.put(token, endpoint);
            if (prev != null && !prev.equals(endpoint))
                logger.warn(
                        "Relocation of {} to {} overwrites previous to {}",
                        new Object[] {token, endpoint, prev});
        }
    } finally {
        lock.writeLock().unlock();
    }
}
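A short sketch of how the method above might be invoked. Token parsing is partitioner-specific, so the partitioner and tokenMetadata references are assumptions about the surrounding setup.

static void relocateExample(TokenMetadata tokenMetadata, IPartitioner partitioner) throws UnknownHostException {
    InetAddress target = InetAddress.getByName("10.0.0.5");
    Collection<Token> moving = Arrays.asList(
            partitioner.getTokenFactory().fromString("100"),   // assumed token construction
            partitioner.getTokenFactory().fromString("200"));
    tokenMetadata.addRelocatingTokens(moving, target);
    // A warning is logged only if one of these tokens was already relocating to a different endpoint.
}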
private void checkRedeclarationsInPackages(@NotNull TopDownAnalysisContext c) {
    for (MutablePackageFragmentDescriptor packageFragment :
            Sets.newHashSet(c.getPackageFragments().values())) {
        PackageViewDescriptor packageView =
                packageFragment.getContainingDeclaration().getPackage(packageFragment.getFqName());
        JetScope packageViewScope = packageView.getMemberScope();
        Multimap<Name, DeclarationDescriptor> simpleNameDescriptors =
                packageFragment.getMemberScope().getDeclaredDescriptorsAccessibleBySimpleName();
        for (Name name : simpleNameDescriptors.keySet()) {
            // Keep only properties with no receiver
            Collection<DeclarationDescriptor> descriptors =
                    Collections2.filter(
                            simpleNameDescriptors.get(name),
                            new Predicate<DeclarationDescriptor>() {
                                @Override
                                public boolean apply(@Nullable DeclarationDescriptor descriptor) {
                                    if (descriptor instanceof PropertyDescriptor) {
                                        PropertyDescriptor propertyDescriptor = (PropertyDescriptor) descriptor;
                                        return propertyDescriptor.getReceiverParameter() == null;
                                    }
                                    return true;
                                }
                            });
            ContainerUtil.addIfNotNull(descriptors, packageViewScope.getPackage(name));

            if (descriptors.size() > 1) {
                for (DeclarationDescriptor declarationDescriptor : descriptors) {
                    for (PsiElement declaration : getDeclarationsByDescriptor(declarationDescriptor)) {
                        assert declaration != null
                                : "Null declaration for descriptor: "
                                        + declarationDescriptor
                                        + " "
                                        + (declarationDescriptor != null
                                                ? DescriptorRenderer.FQ_NAMES_IN_TYPES.render(declarationDescriptor)
                                                : "");
                        trace.report(
                                REDECLARATION.on(declaration, declarationDescriptor.getName().asString()));
                    }
                }
            }
        }
    }
}
/**
 * Sorts the aitID:threadInfo (1:n) mapping by n in descending order. Processes the data in the
 * Multimap (key:value -> 1:n), builds (key, n) entries, sorts them by n from largest to
 * smallest, and returns the resulting List of Map.Entry(key, n).
 *
 * @param w_IdMap mapping from an id to its associated ThreadInfo values
 * @return entries of (key, value count), ordered by count in descending order
 */
public List<Map.Entry<String, Integer>> getOrderList(Multimap<String, ThreadInfo> w_IdMap) {
    Set<String> keys = w_IdMap.keySet();
    Map<String, Integer> w_IdMappingThread = Maps.newHashMap();
    for (String key : keys) {
        Collection<ThreadInfo> values = w_IdMap.get(key);
        w_IdMappingThread.put(key, values.size());
    }

    List<Map.Entry<String, Integer>> orderList =
            new ArrayList<Map.Entry<String, Integer>>(w_IdMappingThread.entrySet());
    Collections.sort(
            orderList,
            new Comparator<Map.Entry<String, Integer>>() {
                @Override
                public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
                    return o2.getValue().compareTo(o1.getValue());
                }
            });
    return orderList;
}
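A small usage sketch: the ThreadInfo instances are placeholders (their construction is hypothetical); only getOrderList comes from the code above.

Multimap<String, ThreadInfo> byId = ArrayListMultimap.create();
byId.put("ait-1", new ThreadInfo());   // hypothetical ThreadInfo constructor
byId.put("ait-1", new ThreadInfo());
byId.put("ait-2", new ThreadInfo());

for (Map.Entry<String, Integer> e : getOrderList(byId)) {
    System.out.println(e.getKey() + " -> " + e.getValue());   // prints ait-1 -> 2, then ait-2 -> 1
}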
private void reportRedeclarations(@NotNull Multimap<Name, DeclarationDescriptor> descriptorMap) {
    Set<Pair<PsiElement, Name>> redeclarations = Sets.newHashSet();
    for (Name name : descriptorMap.keySet()) {
        Collection<DeclarationDescriptor> descriptors = descriptorMap.get(name);
        if (descriptors.size() > 1) {
            // We mustn't compare PropertyDescriptor with PropertyDescriptor because we do this at
            // OverloadResolver
            for (DeclarationDescriptor descriptor : descriptors) {
                if (descriptor instanceof ClassDescriptor) {
                    for (DeclarationDescriptor descriptor2 : descriptors) {
                        if (descriptor == descriptor2) {
                            continue;
                        }

                        redeclarations.add(
                                Pair.create(
                                        BindingContextUtils.classDescriptorToDeclaration(
                                                trace.getBindingContext(), (ClassDescriptor) descriptor),
                                        descriptor.getName()));
                        if (descriptor2 instanceof PropertyDescriptor) {
                            redeclarations.add(
                                    Pair.create(
                                            BindingContextUtils.descriptorToDeclaration(
                                                    trace.getBindingContext(), descriptor2),
                                            descriptor2.getName()));
                        }
                    }
                }
            }
        }
    }
    for (Pair<PsiElement, Name> redeclaration : redeclarations) {
        trace.report(
                REDECLARATION.on(redeclaration.getFirst(), redeclaration.getSecond().asString()));
    }
}
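The redeclaration check above boils down to a reusable pattern: group declarations by name in a Multimap and flag every name bound more than once. A standalone sketch with a hypothetical Declaration type; the compiler-specific descriptor and trace machinery is replaced by plain printing.

// Group by simple name, then report every name that maps to more than one declaration.
static void reportDuplicateNames(List<Declaration> declarations) {
    Multimap<String, Declaration> byName = ArrayListMultimap.create();
    for (Declaration d : declarations) {
        byName.put(d.getName(), d);                            // hypothetical accessor
    }
    for (String name : byName.keySet()) {
        Collection<Declaration> sameName = byName.get(name);
        if (sameName.size() > 1) {
            for (Declaration d : sameName) {
                System.err.println("Redeclaration: " + name);  // the real code reports via trace.report(...)
            }
        }
    }
}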
/**
 * Returns the primary range for each of the given tokens, i.e. the range
 * (predecessor, token] on the ring.
 */
public Collection<Range<Token>> getPrimaryRangesFor(Collection<Token> tokens) {
    Collection<Range<Token>> ranges = new ArrayList<Range<Token>>(tokens.size());
    for (Token right : tokens)
        ranges.add(new Range<Token>(getPredecessor(right), right));
    return ranges;
}
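A hedged illustration of the (predecessor, token] semantics; the tokenMetadata and partitioner references are assumptions about the surrounding class, since Token construction is partitioner-specific.

Collection<Token> local = Collections.singletonList(
        partitioner.getTokenFactory().fromString("300"));      // assumed token construction
for (Range<Token> primary : tokenMetadata.getPrimaryRangesFor(local)) {
    // primary.left is the predecessor (exclusive), primary.right is the token itself (inclusive)
    System.out.println(primary);
}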