@NotNull
private static Collection<PsiLanguageInjectionHost> collectInjectionHosts(@NotNull PsiFile file, @NotNull TextRange range) {
  Stack<PsiElement> toProcess = new Stack<PsiElement>();
  for (PsiElement e = file.findElementAt(range.getStartOffset()); e != null; e = e.getNextSibling()) {
    if (e.getTextRange().getStartOffset() >= range.getEndOffset()) {
      break;
    }
    toProcess.push(e);
  }
  if (toProcess.isEmpty()) {
    return Collections.emptySet();
  }

  Set<PsiLanguageInjectionHost> result = null;
  while (!toProcess.isEmpty()) {
    PsiElement e = toProcess.pop();
    if (e instanceof PsiLanguageInjectionHost) {
      if (result == null) {
        result = ContainerUtilRt.newHashSet();
      }
      result.add((PsiLanguageInjectionHost)e);
    }
    else {
      for (PsiElement child = e.getFirstChild(); child != null; child = child.getNextSibling()) {
        // Stop once the children move past the target range (check the child itself, not its parent).
        if (child.getTextRange().getStartOffset() >= range.getEndOffset()) {
          break;
        }
        toProcess.push(child);
      }
    }
  }
  return result == null ? Collections.<PsiLanguageInjectionHost>emptySet() : result;
}
@NotNull
private Collection<DefaultMutableTreeNode> getSelectedNodes() {
  if (!isValid()) {
    return Collections.emptyList();
  }
  JTree tree = myStructureViewComponent.getTree();
  if (tree == null) return Collections.emptyList();
  TreePath[] selected = tree.getSelectionModel().getSelectionPaths();
  if (selected == null || selected.length == 0) return Collections.emptyList();

  return ContainerUtil.map(selected, new Function<TreePath, DefaultMutableTreeNode>() {
    @Override
    public DefaultMutableTreeNode fun(TreePath treePath) {
      return (DefaultMutableTreeNode)treePath.getLastPathComponent();
    }
  });
}
@Override
public void doCollectInformation(@NotNull ProgressIndicator progress) {
  assert myDocument != null;
  final Long stamp = myEditor.getUserData(LAST_TIME_INDENTS_BUILT);
  if (stamp != null && stamp.longValue() == nowStamp()) return;

  myDescriptors = buildDescriptors();

  ArrayList<TextRange> ranges = new ArrayList<TextRange>();
  for (IndentGuideDescriptor descriptor : myDescriptors) {
    ProgressManager.checkCanceled();
    int endOffset = descriptor.endLine < myDocument.getLineCount()
                    ? myDocument.getLineStartOffset(descriptor.endLine)
                    : myDocument.getTextLength();
    ranges.add(new TextRange(myDocument.getLineStartOffset(descriptor.startLine), endOffset));
  }

  Collections.sort(ranges, RANGE_COMPARATOR);
  myRanges = ranges;
}
public static List<TemplateImpl> findMatchingTemplates(CharSequence text,
                                                       int caretOffset,
                                                       @Nullable Character shortcutChar,
                                                       TemplateSettings settings,
                                                       boolean hasArgument) {
  List<TemplateImpl> candidates = Collections.emptyList();
  for (int i = settings.getMaxKeyLength(); i >= 1; i--) {
    int wordStart = caretOffset - i;
    if (wordStart < 0) {
      continue;
    }
    String key = text.subSequence(wordStart, caretOffset).toString();
    if (Character.isJavaIdentifierStart(key.charAt(0))) {
      if (wordStart > 0 && Character.isJavaIdentifierPart(text.charAt(wordStart - 1))) {
        continue;
      }
    }

    candidates = settings.collectMatchingCandidates(key, shortcutChar, hasArgument);
    if (!candidates.isEmpty()) break;
  }
  return candidates;
}
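// Hedged usage sketch, not part of the original source: shows how a caller might look up
// live-template candidates ending at the caret. This helper and its "editor" parameter are
// illustrative assumptions; TemplateSettings.getInstance() is the standard settings accessor.
private static List<TemplateImpl> matchingTemplatesAtCaret(Editor editor) {
  return findMatchingTemplates(
    editor.getDocument().getCharsSequence(),  // document text, scanned backwards from the caret
    editor.getCaretModel().getOffset(),       // caret offset marking the end of the candidate key
    null,                                     // null: accept any shortcut character
    TemplateSettings.getInstance(),           // global live-template settings
    false);                                   // no template argument typed yet
}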
private List<IndentGuideDescriptor> buildDescriptors() {
  if (!myEditor.getSettings().isIndentGuidesShown()) return Collections.emptyList();

  IndentsCalculator calculator = new IndentsCalculator();
  calculator.calculate();
  int[] lineIndents = calculator.lineIndents;
  TIntIntHashMap effectiveCommentColumns = calculator.indentAfterUncomment;

  List<IndentGuideDescriptor> descriptors = new ArrayList<IndentGuideDescriptor>();

  IntStack lines = new IntStack();
  IntStack indents = new IntStack();

  lines.push(0);
  indents.push(0);
  assert myDocument != null;
  final CharSequence chars = myDocument.getCharsSequence();
  for (int line = 1; line < lineIndents.length; line++) {
    ProgressManager.checkCanceled();
    int curIndent = lineIndents[line];

    while (!indents.empty() && curIndent <= indents.peek()) {
      ProgressManager.checkCanceled();
      final int level = indents.pop();
      int startLine = lines.pop();
      if (level > 0) {
        boolean addDescriptor = effectiveCommentColumns.contains(startLine); // Indent started at comment
        if (!addDescriptor) {
          for (int i = startLine; i < line; i++) {
            if (level != lineIndents[i] && level != effectiveCommentColumns.get(i)) {
              addDescriptor = true;
              break;
            }
          }
        }
        if (addDescriptor) {
          descriptors.add(createDescriptor(level, startLine, line, chars));
        }
      }
    }

    int prevLine = line - 1;
    int prevIndent = lineIndents[prevLine];
    if (curIndent - prevIndent > 1) {
      lines.push(prevLine);
      indents.push(prevIndent);
    }
  }

  while (!indents.empty()) {
    ProgressManager.checkCanceled();
    final int level = indents.pop();
    int startLine = lines.pop();
    if (level > 0) {
      descriptors.add(createDescriptor(level, startLine, myDocument.getLineCount(), chars));
    }
  }

  return descriptors;
}
private static TextRange preprocess(@NotNull final ASTNode node, @NotNull TextRange range) {
  TextRange result = range;
  PsiElement psi = node.getPsi();
  if (!psi.isValid()) {
    for (PreFormatProcessor processor : Extensions.getExtensions(PreFormatProcessor.EP_NAME)) {
      result = processor.process(node, result);
    }
    return result;
  }

  PsiFile file = psi.getContainingFile();

  // We use a set here because we encountered a situation when more than one PSI leaf points to
  // the same injected fragment (at least for sql injected into sql).
  final LinkedHashSet<TextRange> injectedFileRangesSet = ContainerUtilRt.newLinkedHashSet();

  if (!psi.getProject().isDefault()) {
    List<DocumentWindow> injectedDocuments = InjectedLanguageUtil.getCachedInjectedDocuments(file);
    if (!injectedDocuments.isEmpty()) {
      for (DocumentWindow injectedDocument : injectedDocuments) {
        injectedFileRangesSet.add(TextRange.from(injectedDocument.injectedToHost(0), injectedDocument.getTextLength()));
      }
    }
    else {
      Collection<PsiLanguageInjectionHost> injectionHosts = collectInjectionHosts(file, range);
      PsiLanguageInjectionHost.InjectedPsiVisitor visitor = new PsiLanguageInjectionHost.InjectedPsiVisitor() {
        @Override
        public void visit(@NotNull PsiFile injectedPsi, @NotNull List<PsiLanguageInjectionHost.Shred> places) {
          for (PsiLanguageInjectionHost.Shred place : places) {
            Segment rangeMarker = place.getHostRangeMarker();
            injectedFileRangesSet.add(TextRange.create(rangeMarker.getStartOffset(), rangeMarker.getEndOffset()));
          }
        }
      };
      for (PsiLanguageInjectionHost host : injectionHosts) {
        InjectedLanguageUtil.enumerate(host, visitor);
      }
    }
  }

  if (!injectedFileRangesSet.isEmpty()) {
    List<TextRange> ranges = ContainerUtilRt.newArrayList(injectedFileRangesSet);
    Collections.reverse(ranges);
    for (TextRange injectedFileRange : ranges) {
      int startHostOffset = injectedFileRange.getStartOffset();
      int endHostOffset = injectedFileRange.getEndOffset();
      if (startHostOffset >= range.getStartOffset() && endHostOffset <= range.getEndOffset()) {
        PsiFile injected = InjectedLanguageUtil.findInjectedPsiNoCommit(file, startHostOffset);
        if (injected != null) {
          // Start of the formatting range in injected-file coordinates (never negative thanks to the guard above).
          int startInjectedOffset = range.getStartOffset() > startHostOffset ? range.getStartOffset() - startHostOffset : 0;
          int endInjectedOffset = injected.getTextLength();
          if (range.getEndOffset() < endHostOffset) {
            endInjectedOffset -= endHostOffset - range.getEndOffset();
          }
          final TextRange initialInjectedRange = TextRange.create(startInjectedOffset, endInjectedOffset);
          TextRange injectedRange = initialInjectedRange;
          for (PreFormatProcessor processor : Extensions.getExtensions(PreFormatProcessor.EP_NAME)) {
            injectedRange = processor.process(injected.getNode(), injectedRange);
          }

          // Allow only range expansion (not reduction) for injected context.
          if ((initialInjectedRange.getStartOffset() > injectedRange.getStartOffset() && initialInjectedRange.getStartOffset() > 0)
              || (initialInjectedRange.getEndOffset() < injectedRange.getEndOffset()
                  && initialInjectedRange.getEndOffset() < injected.getTextLength()))
          {
            // Map the expansion of the injected range back onto the host range.
            range = TextRange.create(
              range.getStartOffset() + injectedRange.getStartOffset() - initialInjectedRange.getStartOffset(),
              range.getEndOffset() + injectedRange.getEndOffset() - initialInjectedRange.getEndOffset());
          }
        }
      }
    }
  }

  for (PreFormatProcessor processor : Extensions.getExtensions(PreFormatProcessor.EP_NAME)) {
    result = processor.process(node, result);
  }
  return result;
}
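// Hedged illustration, not part of the original source: preprocess() above runs every registered
// PreFormatProcessor over both the injected and the host ranges. A processor implements the
// PreFormatProcessor interface (registered via its extension point) and may return an adjusted range;
// the class name below is made up and the body is a minimal no-op sketch.
public class NoOpPreFormatProcessor implements PreFormatProcessor {
  @NotNull
  @Override
  public TextRange process(@NotNull ASTNode element, @NotNull TextRange range) {
    // A real processor could rewrite the document under 'element' here and return
    // the (possibly widened) range that still needs formatting.
    return range;
  }
}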