private static List<FunctionDescriptor> getSuperFunctionsForMethod(
        @NotNull PsiMethodWrapper method,
        @NotNull BindingTrace trace,
        @NotNull ClassDescriptor containingClass) {
    List<FunctionDescriptor> superFunctions = Lists.newArrayList();

    Map<ClassDescriptor, JetType> superclassToSupertype = getSuperclassToSupertypeMap(containingClass);

    Multimap<FqName, Pair<FunctionDescriptor, PsiMethod>> superclassToFunctions =
            getSuperclassToFunctionsMultimap(method, trace.getBindingContext(), containingClass);

    for (HierarchicalMethodSignature superSignature : method.getPsiMethod().getHierarchicalMethodSignature().getSuperSignatures()) {
        PsiMethod superMethod = superSignature.getMethod();

        PsiClass psiClass = superMethod.getContainingClass();
        assert psiClass != null;
        String classFqNameString = psiClass.getQualifiedName();
        assert classFqNameString != null;
        FqName classFqName = new FqName(classFqNameString);

        if (!JavaToKotlinClassMap.getInstance().mapPlatformClass(classFqName).isEmpty()) {
            for (FunctionDescriptor superFun : JavaToKotlinMethodMap.INSTANCE.getFunctions(superMethod, containingClass)) {
                superFunctions.add(substituteSuperFunction(superclassToSupertype, superFun));
            }
            continue;
        }

        DeclarationDescriptor superFun = superMethod instanceof JetClsMethod
                ? trace.get(BindingContext.DECLARATION_TO_DESCRIPTOR, ((JetClsMethod) superMethod).getOrigin())
                : findSuperFunction(superclassToFunctions.get(classFqName), superMethod);

        if (superFun == null) {
            reportCantFindSuperFunction(method);
            continue;
        }

        assert superFun instanceof FunctionDescriptor : superFun.getClass().getName();

        superFunctions.add(substituteSuperFunction(superclassToSupertype, (FunctionDescriptor) superFun));
    }

    // sorting for diagnostic stability
    Collections.sort(superFunctions, new Comparator<FunctionDescriptor>() {
        @Override
        public int compare(FunctionDescriptor fun1, FunctionDescriptor fun2) {
            FqNameUnsafe fqName1 = getFQName(fun1.getContainingDeclaration());
            FqNameUnsafe fqName2 = getFQName(fun2.getContainingDeclaration());
            return fqName1.getFqName().compareTo(fqName2.getFqName());
        }
    });

    return superFunctions;
}
/**
 * Sorts by the aitID:threadInfo -> 1:n relationship, with n in descending order.
 * Processes the data in the Multimap (key:value -> 1:n), extracts each <key, n> pair,
 * sorts by n in descending order and returns the ordered List<Map.Entry<key, n>>.
 *
 * @param w_IdMap multimap from an id to its associated ThreadInfo values
 * @return the <key, value-count> entries, ordered by count descending
 */
public List<Map.Entry<String, Integer>> getOrderList(Multimap<String, ThreadInfo> w_IdMap) {
    Set<String> keys = w_IdMap.keySet();
    Map<String, Integer> w_IdMappingThread = Maps.newHashMap();
    for (String key : keys) {
        Collection<ThreadInfo> values = w_IdMap.get(key);
        w_IdMappingThread.put(key, values.size());
    }
    List<Map.Entry<String, Integer>> orderList =
            new ArrayList<Map.Entry<String, Integer>>(w_IdMappingThread.entrySet());
    Collections.sort(orderList, new Comparator<Map.Entry<String, Integer>>() {
        @Override
        public int compare(Map.Entry<String, Integer> o1, Map.Entry<String, Integer> o2) {
            return o2.getValue().compareTo(o1.getValue());
        }
    });
    return orderList;
}
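// Usage sketch for getOrderList above. Assumptions: ThreadInfo stands in for the value class used
// by the method and is created with a no-arg constructor purely for illustration; `helper` is a
// hypothetical instance of the class that declares getOrderList; ArrayListMultimap is Guava's.
Multimap<String, ThreadInfo> w_IdMap = ArrayListMultimap.create();
w_IdMap.put("id-2", new ThreadInfo());
w_IdMap.put("id-2", new ThreadInfo());
w_IdMap.put("id-1", new ThreadInfo());

List<Map.Entry<String, Integer>> ordered = helper.getOrderList(w_IdMap);
// The key with the most associated values comes first:
// ordered.get(0) -> "id-2"=2, ordered.get(1) -> "id-1"=1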
/**
 * Group files with similar min timestamp into buckets. Files with recent min timestamps are
 * grouped together into buckets designated to short timespans while files with older timestamps
 * are grouped into buckets representing longer timespans.
 *
 * @param files pairs consisting of a file and its min timestamp
 * @param timeUnit the size, in the same unit as the timestamps, of the most recent (smallest) timespan
 * @param base how many buckets of one timespan are grouped into the next, longer timespan
 * @param now the current time, used to anchor the most recent timespan
 * @return a list of buckets of files. The list is ordered such that the files with newest
 *         timestamps come first. Each bucket is also a list of files ordered from newest to oldest.
 */
@VisibleForTesting
static <T> List<List<T>> getBuckets(Collection<Pair<T, Long>> files, long timeUnit, int base, long now) {
    // Sort files by age. Newest first.
    final List<Pair<T, Long>> sortedFiles = Lists.newArrayList(files);
    Collections.sort(sortedFiles, Collections.reverseOrder(new Comparator<Pair<T, Long>>() {
        public int compare(Pair<T, Long> p1, Pair<T, Long> p2) {
            return p1.right.compareTo(p2.right);
        }
    }));

    List<List<T>> buckets = Lists.newArrayList();
    Target target = getInitialTarget(now, timeUnit);
    PeekingIterator<Pair<T, Long>> it = Iterators.peekingIterator(sortedFiles.iterator());

    outerLoop:
    while (it.hasNext()) {
        while (!target.onTarget(it.peek().right)) {
            // If the file is too new for the target, skip it.
            if (target.compareToTimestamp(it.peek().right) < 0) {
                it.next();
                if (!it.hasNext()) {
                    break outerLoop;
                }
            } else {
                // If the file is too old for the target, switch targets.
                target = target.nextTarget(base);
            }
        }

        List<T> bucket = Lists.newArrayList();
        while (target.onTarget(it.peek().right)) {
            bucket.add(it.next().left);
            if (!it.hasNext()) {
                break;
            }
        }
        buckets.add(bucket);
    }

    return buckets;
}
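// A minimal, self-contained sketch of the same skeleton used by getBuckets above: sort entries
// newest-first, then sweep once and group consecutive entries that fall into the current time
// window. It deliberately uses fixed-size windows instead of the Target/getInitialTarget machinery,
// so it only illustrates the iteration pattern, not the exponentially growing timespans.
static List<List<String>> fixedWindowBuckets(Map<String, Long> fileToTimestamp, long window, long now) {
    List<Map.Entry<String, Long>> sorted = new ArrayList<>(fileToTimestamp.entrySet());
    sorted.sort((a, b) -> b.getValue().compareTo(a.getValue())); // newest first

    List<List<String>> buckets = new ArrayList<>();
    long windowStart = now - window;
    List<String> bucket = new ArrayList<>();
    for (Map.Entry<String, Long> e : sorted) {
        // Entry is too old for the current window: close the bucket and slide the window back.
        while (e.getValue() < windowStart) {
            if (!bucket.isEmpty()) {
                buckets.add(bucket);
                bucket = new ArrayList<>();
            }
            windowStart -= window;
        }
        bucket.add(e.getKey());
    }
    if (!bucket.isEmpty()) {
        buckets.add(bucket);
    }
    return buckets;
}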
private List<Token> sortTokens() {
    List<Token> tokens = new ArrayList<Token>(tokenToEndPointMap.keySet());
    Collections.sort(tokens);
    return Collections.unmodifiableList(tokens);
}
/**
 * Returns all documents in this cluster ordered according to the provided comparator. See
 * {@link Document} for common comparators, e.g. {@link Document#BY_ID_COMPARATOR}.
 */
public List<Document> getAllDocuments(Comparator<Document> comparator) {
    final List<Document> sortedDocuments = Lists.newArrayList(getAllDocuments());
    Collections.sort(sortedDocuments, comparator);
    return sortedDocuments;
}
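// Usage sketch. Assumption: `cluster` is an instance of the class that declares getAllDocuments
// above; Document#BY_ID_COMPARATOR is the comparator its Javadoc points to.
List<Document> byId = cluster.getAllDocuments(Document.BY_ID_COMPARATOR);
// The returned list is a sorted copy, so reordering it does not affect the cluster's own documents.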
/**
 * Retrieve all the info about the requested package types. This is used for the package types
 * that have one or more instances, each with different versions. The resulting array is sorted
 * according to the PkgInfo's sort order.
 *
 * <p>To force the LocalSdk parser to load <b>everything</b>, simply call this method with the
 * {@link PkgType#PKG_ALL} argument to load all the known package types.
 *
 * <p>Note: you can use this with {@link PkgType#PKG_TOOLS}, {@link PkgType#PKG_PLATFORM_TOOLS}
 * and {@link PkgType#PKG_DOC} but since there can only be one package of these types, it is more
 * efficient to use {@link #getPkgInfo(PkgType)} to query them.
 *
 * @param filters One or more of {@link PkgType#PKG_ADDON}, {@link PkgType#PKG_PLATFORM},
 *     {@link PkgType#PKG_BUILD_TOOLS}, {@link PkgType#PKG_EXTRA}, {@link PkgType#PKG_SOURCE},
 *     {@link PkgType#PKG_SYS_IMAGE}
 * @return An array (possibly empty) of matching installed packages. Never returns null.
 */
@NonNull
public LocalPkgInfo[] getPkgsInfos(@NonNull EnumSet<PkgType> filters) {
    List<LocalPkgInfo> list = Lists.newArrayList();

    for (PkgType filter : filters) {
        if (filter == PkgType.PKG_TOOLS
                || filter == PkgType.PKG_PLATFORM_TOOLS
                || filter == PkgType.PKG_DOC
                || filter == PkgType.PKG_NDK) {
            LocalPkgInfo info = getPkgInfo(filter);
            if (info != null) {
                list.add(info);
            }
        } else {
            synchronized (mLocalPackages) {
                Collection<LocalPkgInfo> existing = mLocalPackages.get(filter);
                assert existing != null; // Multimap returns an empty set if not found

                if (!existing.isEmpty()) {
                    list.addAll(existing);
                    continue;
                }

                File subDir = new File(mSdkRoot, filter.getFolderName());

                if (!mVisitedDirs.containsEntry(filter, new LocalDirInfo.MapComparator(subDir))) {
                    switch (filter) {
                        case PKG_BUILD_TOOLS:
                            scanBuildTools(subDir, existing);
                            break;

                        case PKG_PLATFORM:
                            scanPlatforms(subDir, existing);
                            break;

                        case PKG_SYS_IMAGE:
                            scanSysImages(subDir, existing, false);
                            break;

                        case PKG_ADDON_SYS_IMAGE:
                            scanSysImages(subDir, existing, true);
                            break;

                        case PKG_ADDON:
                            scanAddons(subDir, existing);
                            break;

                        case PKG_SAMPLE:
                            scanSamples(subDir, existing);
                            break;

                        case PKG_SOURCE:
                            scanSources(subDir, existing);
                            break;

                        case PKG_EXTRA:
                            scanExtras(subDir, existing);
                            break;

                        case PKG_TOOLS:
                        case PKG_PLATFORM_TOOLS:
                        case PKG_DOC:
                        case PKG_NDK:
                            break;

                        default:
                            throw new IllegalArgumentException("Unsupported pkg type " + filter.toString());
                    }
                    mVisitedDirs.put(filter, new LocalDirInfo(mFileOp, subDir));
                    list.addAll(existing);
                }
            }
        }
    }

    Collections.sort(list);
    return list.toArray(new LocalPkgInfo[list.size()]);
}
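// Usage sketch. Assumption: `sdk` is an already-initialized instance of the class that declares
// getPkgsInfos above; only PkgType constants named in its code and Javadoc are used.
LocalPkgInfo[] platformsAndAddons =
        sdk.getPkgsInfos(EnumSet.of(PkgType.PKG_PLATFORM, PkgType.PKG_ADDON));

// Force everything to be parsed, as the Javadoc describes (EnumSet.allOf matches the
// "load all known package types" case without relying on the PKG_ALL shortcut):
LocalPkgInfo[] everything = sdk.getPkgsInfos(EnumSet.allOf(PkgType.class));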