private boolean processValue(Processor<V> p, Object v) {
  if (v instanceof Object[]) {
    for (Object o : (Object[]) v) {
      if (!p.process((V) o)) return false;
    }
  } else if (v != null) {
    return p.process((V) v);
  }
  return true;
}
public static boolean processDependenciesInDependencyManagement(
    @NotNull MavenDomProjectModel projectDom,
    @NotNull final Processor<MavenDomDependency> processor,
    @NotNull final Project project) {

  Processor<MavenDomDependencies> managedDependenciesListProcessor = dependencies -> {
    SmartList<MavenDomDependency> importDependencies = null;

    for (MavenDomDependency domDependency : dependencies.getDependencies()) {
      if ("import".equals(domDependency.getScope().getRawText())) {
        if (importDependencies == null) {
          importDependencies = new SmartList<MavenDomDependency>();
        }
        importDependencies.add(domDependency);
      } else {
        if (processor.process(domDependency)) return true;
      }
    }

    if (importDependencies != null) {
      for (MavenDomDependency domDependency : importDependencies) {
        GenericDomValue<String> version = domDependency.getVersion();
        if (version.getXmlElement() != null) {
          GenericDomValueReference reference = new GenericDomValueReference(version);
          PsiElement resolve = reference.resolve();

          if (resolve instanceof XmlFile) {
            MavenDomProjectModel dependModel =
                MavenDomUtil.getMavenDomModel((PsiFile) resolve, MavenDomProjectModel.class);
            if (dependModel != null) {
              for (MavenDomDependency dep :
                  dependModel.getDependencyManagement().getDependencies().getDependencies()) {
                if (processor.process(dep)) return true;
              }
            }
          }
        }
      }
    }

    return false;
  };

  Function<MavenDomProjectModelBase, MavenDomDependencies> domFunction =
      mavenDomProfile -> mavenDomProfile.getDependencyManagement().getDependencies();

  return process(projectDom, managedDependenciesListProcessor, project, domFunction, domFunction);
}
// uses hierarchy signature tree if available, traverses class structure by itself otherwise
public static boolean processDirectSuperMethodsSmart(
    @NotNull PsiMethod method, @NotNull Processor<PsiMethod> superMethodProcessor) {
  // boolean old = PsiSuperMethodUtil.isSuperMethod(method, superMethod);

  PsiClass aClass = method.getContainingClass();
  if (aClass == null) return false;

  if (!canHaveSuperMethod(method, true, false)) return false;

  Map<MethodSignature, HierarchicalMethodSignature> cachedMap =
      SIGNATURES_KEY.getCachedValueOrNull(aClass);
  if (cachedMap != null) {
    HierarchicalMethodSignature signature =
        cachedMap.get(method.getSignature(PsiSubstitutor.EMPTY));
    if (signature != null) {
      List<HierarchicalMethodSignature> superSignatures = signature.getSuperSignatures();
      for (HierarchicalMethodSignature superSignature : superSignatures) {
        if (!superMethodProcessor.process(superSignature.getMethod())) return false;
      }
      return true;
    }
  }

  PsiClassType[] directSupers = aClass.getSuperTypes();
  for (PsiClassType directSuper : directSupers) {
    PsiClassType.ClassResolveResult resolveResult = directSuper.resolveGenerics();
    if (resolveResult.getSubstitutor() != PsiSubstitutor.EMPTY) {
      // generics
      break;
    }
    PsiClass directSuperClass = resolveResult.getElement();
    if (directSuperClass == null) continue;
    PsiMethod[] candidates = directSuperClass.findMethodsBySignature(method, false);
    for (PsiMethod candidate : candidates) {
      if (PsiUtil.canBeOverriden(candidate)) {
        if (!superMethodProcessor.process(candidate)) return false;
      }
    }
    return true;
  }

  List<HierarchicalMethodSignature> superSignatures =
      method.getHierarchicalMethodSignature().getSuperSignatures();
  for (HierarchicalMethodSignature superSignature : superSignatures) {
    if (!superMethodProcessor.process(superSignature.getMethod())) return false;
  }
  return true;
}
private static boolean processPatternContext(
    @NotNull BaseInjection injection,
    @NotNull PsiFile file,
    @NotNull Processor<PsiElement> processor) {
  return processor.process(
      getRootByClasses(file, InjectorUtils.getPatternClasses(injection.getSupportId())));
}
public static void processPSOutput(Process psProcess, Processor<String> processor)
    throws IOException {
  @SuppressWarnings({"IOResourceOpenedButNotSafelyClosed"})
  BufferedReader stdOutput =
      new BufferedReader(new InputStreamReader(psProcess.getInputStream()));
  BufferedReader stdError =
      new BufferedReader(new InputStreamReader(psProcess.getErrorStream()));

  try {
    String s;
    stdOutput.readLine(); // ps output header

    while ((s = stdOutput.readLine()) != null) {
      processor.process(s);
    }

    StringBuilder errorStr = new StringBuilder();
    while ((s = stdError.readLine()) != null) {
      errorStr.append(s).append("\n");
    }
    if (errorStr.length() > 0) {
      throw new IllegalStateException("error:" + errorStr.toString());
    }
  } finally {
    stdOutput.close();
    stdError.close();
  }
}
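// Usage sketch for processPSOutput (assumption: the method above is a static member of the same
// class; the "ps -e" command line is only an illustrative example). Collects every non-header
// output line into a list.
private static List<String> listProcesses() throws IOException {
  Process ps = Runtime.getRuntime().exec(new String[]{"ps", "-e"});
  List<String> lines = new ArrayList<String>();
  processPSOutput(ps, line -> {
    lines.add(line); // the header line has already been skipped by processPSOutput
    return true;     // processPSOutput ignores the return value, but true means "continue"
  });
  return lines;
}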
public boolean execute(@NotNull PsiElement element, int offsetInElement) {
  PsiReference ref = element.getReference();
  if (ref != null && ref.isReferenceTo(myElement)) {
    return myPsiReferenceProcessor.process(ref);
  }
  return true;
}
/**
 * Retrieves indent options for a PSI file from an associated document or (if not defined in the
 * document) from file indent options providers.
 *
 * @param file              The PSI file to retrieve options for.
 * @param formatRange       The text range within the file for formatting purposes, or null if
 *                          there is either no specific range or multiple ranges. If the range
 *                          covers the entire file (full reformat), options stored in the document
 *                          are ignored and indent options are taken from file indent options
 *                          providers.
 * @param ignoreDocOptions  Ignore options stored in the document and use file indent options
 *                          providers even if there is no text range or the text range doesn't
 *                          cover the entire file.
 * @param providerProcessor A callback object receiving a reference to the indent options provider
 *                          which has returned the indent options.
 * @return Indent options from the associated document or file indent options providers.
 * @see com.intellij.psi.codeStyle.FileIndentOptionsProvider
 */
@NotNull
public IndentOptions getIndentOptionsByFile(
    @Nullable PsiFile file,
    @Nullable TextRange formatRange,
    boolean ignoreDocOptions,
    @Nullable Processor<FileIndentOptionsProvider> providerProcessor) {
  if (file != null && file.isValid() && file.isWritable()) {
    boolean isFullReformat = isFileFullyCoveredByRange(file, formatRange);
    if (!ignoreDocOptions && !isFullReformat) {
      IndentOptions docOptions = IndentOptions.retrieveFromAssociatedDocument(file);
      if (docOptions != null) return docOptions;
    }
    FileIndentOptionsProvider[] providers =
        Extensions.getExtensions(FileIndentOptionsProvider.EP_NAME);
    for (FileIndentOptionsProvider provider : providers) {
      if (!isFullReformat || provider.useOnFullReformat()) {
        IndentOptions indentOptions = provider.getIndentOptions(this, file);
        if (indentOptions != null) {
          if (providerProcessor != null) {
            providerProcessor.process(provider);
          }
          logIndentOptions(file, provider, indentOptions);
          return indentOptions;
        }
      }
    }
    return getIndentOptions(file.getFileType());
  }
  else return OTHER_INDENT_OPTIONS;
}
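// Usage sketch (assumption: getIndentOptionsByFile above is an instance method of the project's
// CodeStyleSettings, and IndentOptions is the type that method declares). Resolves the effective
// indent options for a file, honoring document-stored options and not tracking the provider.
private static IndentOptions resolveIndentOptions(Project project, PsiFile psiFile) {
  CodeStyleSettings settings = CodeStyleSettingsManager.getSettings(project);
  return settings.getIndentOptionsByFile(psiFile, null, false, null);
}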
public void processInjectableElements(Collection<PsiElement> in, Processor<PsiElement> processor) {
  ClassMapCachingNulls<MultiHostInjector> map = getInjectorMap();
  for (PsiElement element : in) {
    if (map.get(element.getClass()) != null) processor.process(element);
  }
}
private static boolean processRootsByClassNames(
    @NotNull PsiFile file, @Nullable String type, @NotNull Processor<PsiElement> processor) {
  Project project = file.getProject();
  Set<String> classNames = collectDevPatternClassNames(project);
  if (!classNames.isEmpty()) {
    JavaPsiFacade psiFacade = JavaPsiFacade.getInstance(project);
    for (String className : classNames) {
      PsiClass patternClass = psiFacade.findClass(className, GlobalSearchScope.allScope(project));
      if (patternClass != null && !processor.process(patternClass)) return false;
    }
  }

  Class[] classes =
      StringUtil.isEmpty(type)
          ? ArrayUtil.EMPTY_CLASS_ARRAY
          : PatternCompilerFactory.getFactory().getPatternClasses(type);
  return classes.length == 0 || processor.process(getRootByClasses(file, classes));
}
public void processWords(final CharSequence fileText, final Processor<WordOccurrence> processor) {
  myLexer.start(fileText, 0, fileText.length(), 0);
  WordOccurrence occurrence = null; // shared occurrence

  while (myLexer.getTokenType() != null) {
    final IElementType type = myLexer.getTokenType();
    if (type == FanTokenTypes.IDENTIFIER || FanTokenTypes.FAN_SYS_TYPE == type) {
      if (occurrence == null) {
        occurrence = new WordOccurrence(
            fileText, myLexer.getTokenStart(), myLexer.getTokenEnd(), WordOccurrence.Kind.CODE);
      } else {
        occurrence.init(
            fileText, myLexer.getTokenStart(), myLexer.getTokenEnd(), WordOccurrence.Kind.CODE);
      }
      if (!processor.process(occurrence)) {
        return;
      }
    } else if (FanTokenTypes.COMMENTS.contains(type)) {
      if (!stripWords(
          processor,
          fileText,
          myLexer.getTokenStart(),
          myLexer.getTokenEnd(),
          WordOccurrence.Kind.COMMENTS,
          occurrence)) {
        return;
      }
    } else if (FanTokenTypes.STRING_LITERALS.contains(type)) {
      if (!stripWords(
          processor,
          fileText,
          myLexer.getTokenStart(),
          myLexer.getTokenEnd(),
          WordOccurrence.Kind.LITERALS,
          occurrence)) {
        return;
      }
      if (type == FanTokenTypes.STRING_LITERAL) {
        if (!stripWords(
            processor,
            fileText,
            myLexer.getTokenStart(),
            myLexer.getTokenEnd(),
            WordOccurrence.Kind.CODE,
            occurrence)) {
          return;
        }
      }
    }
    myLexer.advance();
  }
}
private boolean processFilesConcurrently(
    @NotNull Set<VirtualFile> files,
    @NotNull final ProgressIndicator indicator,
    @NotNull final Processor<VirtualFile> processor) {
  final List<VirtualFile> fileList = new ArrayList<VirtualFile>(files);
  // fine but grabs all CPUs
  // return JobLauncher.getInstance().invokeConcurrentlyUnderProgress(fileList, indicator, false, false, processor);

  int parallelism = CacheUpdateRunner.indexingThreadCount();
  final Callable<Boolean> processFileFromSet = () -> {
    final boolean[] result = {true};
    ProgressManager.getInstance()
        .executeProcessUnderProgress(
            () -> {
              while (true) {
                ProgressManager.checkCanceled();
                VirtualFile file;
                synchronized (fileList) {
                  file = fileList.isEmpty() ? null : fileList.remove(fileList.size() - 1);
                }
                if (file == null) {
                  break;
                }
                if (!processor.process(file)) {
                  result[0] = false;
                  break;
                }
              }
            },
            indicator);
    return result[0];
  };

  List<Future<Boolean>> futures =
      ContainerUtil.map(
          Collections.nCopies(parallelism, ""),
          s -> myApplication.executeOnPooledThread(processFileFromSet));

  List<Boolean> results =
      ContainerUtil.map(
          futures,
          future -> {
            try {
              return future.get();
            } catch (Exception e) {
              LOG.error(e);
            }
            return false;
          });

  return !ContainerUtil.exists(
      results,
      result -> {
        return result != null && !result; // null means PCE
      });
}
public static boolean processFilesRecursively(
    @NotNull File root,
    @NotNull Processor<File> processor,
    @Nullable final Processor<File> directoryFilter) {
  final LinkedList<File> queue = new LinkedList<File>();
  queue.add(root);
  while (!queue.isEmpty()) {
    final File file = queue.removeFirst();
    if (!processor.process(file)) return false;
    if (file.isDirectory() && (directoryFilter == null || directoryFilter.process(file))) {
      final File[] children = file.listFiles();
      if (children != null) {
        ContainerUtil.addAll(queue, children);
      }
    }
  }
  return true;
}
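// Usage sketch for the java.io.File variant above (assumption: it is accessible from this class;
// the root parameter is illustrative). Collects all *.java files while skipping hidden
// directories via the directory filter.
private static List<File> collectJavaSources(File projectRoot) {
  final List<File> javaFiles = new ArrayList<File>();
  processFilesRecursively(
      projectRoot,
      file -> {
        if (file.isFile() && file.getName().endsWith(".java")) {
          javaFiles.add(file);
        }
        return true; // returning false here would abort the whole traversal
      },
      dir -> !dir.getName().startsWith(".")); // directory filter: skip hidden directories
  return javaFiles;
}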
// must iterate in start offset order
public boolean processRangeMarkers(@NotNull Processor<Segment> processor) {
  for (UsageInfo usageInfo : getMergedInfos()) {
    Segment segment = usageInfo.getSegment();
    if (segment != null && !processor.process(segment)) {
      return false;
    }
  }
  return true;
}
public void processIncludes(PsiFile file, Processor<FileIncludeInfo> processor) {
  GlobalSearchScope scope = GlobalSearchScope.allScope(myProject);
  List<FileIncludeInfoImpl> infoList = FileIncludeIndex.getIncludes(file.getVirtualFile(), scope);
  for (FileIncludeInfoImpl info : infoList) {
    if (!processor.process(info)) {
      return;
    }
  }
}
// returns true if an element was pulled from the queue and processed successfully
private boolean processNext() {
  T thing = pullFirst();
  if (thing == null) return false;
  if (!myProcessor.process(thing)) {
    stop();
    return false;
  }
  return true;
}
@Override
public boolean processFilesWithWord(
    @NotNull final Processor<PsiFile> psiFileProcessor,
    @NotNull final String word,
    final short occurrenceMask,
    @NotNull final GlobalSearchScope scope,
    final boolean caseSensitively) {
  final List<VirtualFile> vFiles = new ArrayList<VirtualFile>(5);
  collectVirtualFilesWithWord(
      new CommonProcessors.CollectProcessor<VirtualFile>(vFiles),
      word,
      occurrenceMask,
      scope,
      caseSensitively);
  if (vFiles.isEmpty()) return true;

  final Processor<VirtualFile> virtualFileProcessor =
      new ReadActionProcessor<VirtualFile>() {
        @Override
        public boolean processInReadAction(VirtualFile virtualFile) {
          if (virtualFile.isValid()) {
            final PsiFile psiFile = myPsiManager.findFile(virtualFile);
            return psiFile == null || psiFileProcessor.process(psiFile);
          }
          return true;
        }
      };

  // IMPORTANT!!!
  // Since the implementation of virtualFileProcessor.process() may call indices directly or
  // indirectly, we cannot call it inside FileBasedIndex.processValues(); doing so can cause
  // deadlocks (IDEADEV-42137). So first we obtain the files containing the specified word,
  // and then process them without holding the indices' read lock.
  for (VirtualFile vFile : vFiles) {
    ProgressIndicatorProvider.checkCanceled();
    if (!virtualFileProcessor.process(vFile)) {
      return false;
    }
  }
  return true;
}
protected boolean finishCommitInWriteAction(
    @NotNull final Document document,
    @NotNull final List<Processor<Document>> finishProcessors,
    final boolean synchronously) {
  if (myProject.isDisposed()) return false;
  assert !(document instanceof DocumentWindow);
  myIsCommitInProgress = true;
  boolean success = true;
  try {
    final FileViewProvider viewProvider = getCachedViewProvider(document);
    if (viewProvider != null) {
      for (Processor<Document> finishRunnable : finishProcessors) {
        success = finishRunnable.process(document);
        if (synchronously) {
          assert success : finishRunnable + " in " + finishProcessors;
        }
        if (!success) {
          break;
        }
      }
      if (success) {
        myLastCommittedTexts.remove(document);
        viewProvider.contentsSynchronized();
      }
    } else {
      handleCommitWithoutPsi(document);
    }
  } finally {
    myDocumentCommitProcessor.log(
        "in PDI.finishDoc: ", null, synchronously, success, myUncommittedDocuments);
    if (success) {
      myUncommittedDocuments.remove(document);
      myDocumentCommitProcessor.log(
          "in PDI.finishDoc: removed doc", null, synchronously, success, myUncommittedDocuments);
    }
    myIsCommitInProgress = false;
    myDocumentCommitProcessor.log(
        "in PDI.finishDoc: exit", null, synchronously, success, myUncommittedDocuments);
  }
  return success;
}
private void processJavaModuleTargets(
    @NotNull JpsProject jpsProject, @NotNull Processor<ModuleBuildTarget> processor) {
  for (JpsModule module : jpsProject.getModules()) {
    for (JavaModuleBuildTargetType buildTargetType : JavaModuleBuildTargetType.ALL_TYPES) {
      final ModuleBuildTarget moduleBuildTarget = new ModuleBuildTarget(module, buildTargetType);
      if (!processor.process(moduleBuildTarget)) {
        return;
      }
    }
  }
}
public static boolean processFirstBytes(
    @NotNull InputStream stream, int length, @NotNull Processor<ByteSequence> processor)
    throws IOException {
  final byte[] bytes = BUFFER.get();
  assert bytes.length >= length
      : "Cannot process more than " + bytes.length + " in one call, requested:" + length;

  int n = stream.read(bytes, 0, length);
  if (n <= 0) return false;

  return processor.process(new ByteSequence(bytes, 0, n));
}
private static boolean stripWords(
    final Processor<WordOccurrence> processor,
    final CharSequence tokenText,
    int from,
    int to,
    final WordOccurrence.Kind kind,
    WordOccurrence occurrence) {
  // This code may look strange, but it is more efficient: Character.isJavaIdentifierStart/Part
  // are quite costly operations because of Unicode handling, so ASCII is checked first.
  int index = from;

  ScanWordsLoop:
  while (true) {
    while (true) {
      if (index == to) {
        break ScanWordsLoop;
      }
      char c = tokenText.charAt(index);
      if ((c >= 'a' && c <= 'z')
          || (c >= 'A' && c <= 'Z')
          || (c >= '0' && c <= '9')
          || (Character.isJavaIdentifierStart(c) && c != '$')) {
        break;
      }
      index++;
    }

    int index1 = index;
    while (true) {
      index++;
      if (index == to) {
        break;
      }
      char c = tokenText.charAt(index);
      if ((c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c >= '0' && c <= '9')) {
        continue;
      }
      if (!Character.isJavaIdentifierPart(c) || c == '$') {
        break;
      }
    }

    if (occurrence == null) {
      occurrence = new WordOccurrence(tokenText, index1, index, kind);
    } else {
      occurrence.init(tokenText, index1, index, kind);
    }
    if (!processor.process(occurrence)) {
      return false;
    }
  }
  return true;
}
public static boolean addResult(
    Processor<UsageInfo> results, PsiReference ref, FindUsagesOptions options) {
  if (filterUsage(ref.getElement(), options)) {
    TextRange rangeInElement = ref.getRangeInElement();
    return results.process(
        new UsageInfo(
            ref.getElement(),
            rangeInElement.getStartOffset(),
            rangeInElement.getEndOffset(),
            false));
  }
  return true;
}
public static void processParentProjects(
    @NotNull final MavenDomProjectModel projectDom,
    @NotNull final Processor<MavenDomProjectModel> processor) {
  Set<MavenDomProjectModel> processed = new HashSet<MavenDomProjectModel>();
  Project project = projectDom.getManager().getProject();
  MavenDomProjectModel parent = findParent(projectDom, project);
  while (parent != null) {
    if (processed.contains(parent)) break;
    processed.add(parent);
    if (processor.process(parent)) break;
    parent = findParent(parent, project);
  }
}
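// Usage sketch for processParentProjects (assumption: it is accessible as a static utility from
// this class). Note the inverted contract of this particular method: the processor returns true
// to STOP the walk, so a collector has to return false to keep going up the parent chain.
private static List<MavenDomProjectModel> collectParentChain(MavenDomProjectModel projectDom) {
  final List<MavenDomProjectModel> parents = new ArrayList<MavenDomProjectModel>();
  processParentProjects(projectDom, parent -> {
    parents.add(parent);
    return false; // false = continue walking up the parent chain
  });
  return parents;
}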
public static boolean processFilesRecursively(
    @NotNull VirtualFile root, @NotNull Processor<VirtualFile> processor) {
  if (!processor.process(root)) return false;

  if (root.isDirectory()) {
    final LinkedList<VirtualFile[]> queue = new LinkedList<VirtualFile[]>();
    queue.add(root.getChildren());
    do {
      final VirtualFile[] files = queue.removeFirst();
      for (VirtualFile file : files) {
        if (!processor.process(file)) return false;
        if (file.isDirectory()) {
          queue.add(file.getChildren());
        }
      }
    } while (!queue.isEmpty());
  }
  return true;
}
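// Usage sketch for the VirtualFile variant above (assumption: it is accessible from this class).
// Stops the traversal at the first *.xml file found under the root.
private static VirtualFile findFirstXmlFile(VirtualFile rootDir) {
  final Ref<VirtualFile> firstXml = Ref.create();
  processFilesRecursively(rootDir, file -> {
    if (!file.isDirectory() && file.getName().endsWith(".xml")) {
      firstXml.set(file);
      return false; // false stops the breadth-first walk immediately
    }
    return true;
  });
  return firstXml.get();
}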
public static void processFilesRecursively(
    @NotNull VirtualFile root,
    @NotNull Processor<VirtualFile> processor,
    @NotNull Convertor<VirtualFile, Boolean> directoryFilter) {
  if (!processor.process(root)) return;

  if (root.isDirectory() && directoryFilter.convert(root)) {
    final LinkedList<VirtualFile[]> queue = new LinkedList<VirtualFile[]>();
    queue.add(root.getChildren());
    do {
      final VirtualFile[] files = queue.removeFirst();
      for (VirtualFile file : files) {
        if (!processor.process(file)) return;
        if (file.isDirectory() && directoryFilter.convert(file)) {
          queue.add(file.getChildren());
        }
      }
    } while (!queue.isEmpty());
  }
}
/**
 * The difference from FileUtil.processFilesRecursively() is that if the processor returns false,
 * processing of that file's children is cancelled, but the overall traversal doesn't stop.
 */
private static boolean processFilesRecursively(
    @NotNull File root, @NotNull Processor<File> processor) {
  final LinkedList<File> queue = new LinkedList<File>();
  queue.add(root);
  while (!queue.isEmpty()) {
    final File file = queue.removeFirst();
    if (processor.process(file) && file.isDirectory()) {
      final File[] children = file.listFiles();
      if (children != null) {
        ContainerUtil.addAll(queue, children);
      }
    }
  }
  return true;
}
public static boolean visitFiles(@NotNull File root, @NotNull Processor<File> processor) {
  if (!processor.process(root)) {
    return false;
  }

  File[] children = root.listFiles();
  if (children != null) {
    for (File child : children) {
      if (!visitFiles(child, processor)) {
        return false;
      }
    }
  }
  return true;
}
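// Usage sketch for visitFiles (assumption: it is accessible from this class). Counts every file
// and directory under a root; returning true from the processor keeps the recursion going.
private static int countFileTreeEntries(File root) {
  final int[] count = {0};
  visitFiles(root, f -> {
    count[0]++;
    return true;
  });
  return count[0];
}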
private static int addToUsages(
    @NotNull Document document,
    @NotNull Processor<UsageInfo> consumer,
    @NotNull FindModel findModel,
    @NotNull final PsiFile psiFile,
    @NotNull int[] offsetRef,
    int maxUsages) {
  int count = 0;
  CharSequence text = document.getCharsSequence();
  int textLength = document.getTextLength();
  int offset = offsetRef[0];

  Project project = psiFile.getProject();

  FindManager findManager = FindManager.getInstance(project);
  while (offset < textLength) {
    FindResult result = findManager.findString(text, offset, findModel, psiFile.getVirtualFile());
    if (!result.isStringFound()) break;

    final SearchScope customScope = findModel.getCustomScope();
    if (customScope instanceof LocalSearchScope) {
      final TextRange range = new TextRange(result.getStartOffset(), result.getEndOffset());
      if (!((LocalSearchScope) customScope).containsRange(psiFile, range)) break;
    }
    UsageInfo info = new FindResultUsageInfo(findManager, psiFile, offset, findModel, result);
    if (!consumer.process(info)) {
      throw new ProcessCanceledException();
    }
    count++;

    final int prevOffset = offset;
    offset = result.getEndOffset();

    if (prevOffset == offset) {
      // for a regular expression the match can be empty, which would make finding usages loop
      // forever, so advance by one character
      ++offset;
    }
    if (maxUsages > 0 && count >= maxUsages) {
      break;
    }
  }
  offsetRef[0] = offset;
  return count;
}
public static boolean processImplementations(
    final PsiClass psiClass, final Processor<PsiElement> processor, SearchScope scope) {
  final boolean showInterfaces = Registry.is("ide.goto.implementation.show.interfaces");
  if (!ClassInheritorsSearch.search(psiClass, scope, true)
      .forEach(
          new PsiElementProcessorAdapter<>(
              element -> {
                if (!showInterfaces && element.isInterface()) {
                  return true;
                }
                return processor.process(element);
              }))) {
    return false;
  }

  return FunctionalExpressionSearch.search(psiClass, scope)
      .forEach((Processor<PsiFunctionalExpression>) processor::process);
}
// returns number of hits
static int processUsagesInFile(
    @NotNull final PsiFile psiFile,
    @NotNull final FindModel findModel,
    @NotNull final Processor<UsageInfo> consumer) {
  if (findModel.getStringToFind().isEmpty()) {
    if (!ApplicationManager.getApplication()
        .runReadAction((Computable<Boolean>) () -> consumer.process(new UsageInfo(psiFile)))) {
      throw new ProcessCanceledException();
    }
    return 1;
  }
  final VirtualFile virtualFile = psiFile.getVirtualFile();
  if (virtualFile == null) return 0;
  if (virtualFile.getFileType().isBinary()) return 0; // do not decompile .class files
  final Document document =
      ApplicationManager.getApplication()
          .runReadAction(
              (Computable<Document>)
                  () ->
                      virtualFile.isValid()
                          ? FileDocumentManager.getInstance().getDocument(virtualFile)
                          : null);
  if (document == null) return 0;
  final int[] offset = {0};
  int count = 0;
  int found;
  ProgressIndicator indicator =
      ProgressWrapper.unwrap(ProgressManager.getInstance().getProgressIndicator());
  TooManyUsagesStatus tooManyUsagesStatus = TooManyUsagesStatus.getFrom(indicator);
  do {
    tooManyUsagesStatus.pauseProcessingIfTooManyUsages(); // wait for user out of read action
    found =
        ApplicationManager.getApplication()
            .runReadAction(
                (Computable<Integer>)
                    () -> {
                      if (!psiFile.isValid()) return 0;
                      return addToUsages(
                          document, consumer, findModel, psiFile, offset, USAGES_PER_READ_ACTION);
                    });
    count += found;
  } while (found != 0);
  return count;
}
@Override
public boolean processAttributeChildrenDescriptions(
    final Processor<AttributeChildDescriptionImpl> processor) {
  final Set<AttributeChildDescriptionImpl> visited = new THashSet<AttributeChildDescriptionImpl>();
  if (!myStaticGenericInfo.processAttributeChildrenDescriptions(
      new Processor<AttributeChildDescriptionImpl>() {
        public boolean process(AttributeChildDescriptionImpl attributeChildDescription) {
          visited.add(attributeChildDescription);
          return processor.process(attributeChildDescription);
        }
      })) {
    return false;
  }
  for (final AttributeChildDescriptionImpl description : getAttributeChildrenDescriptions()) {
    if (!visited.contains(description) && !processor.process(description)) {
      return false;
    }
  }
  return true;
}