@Override
public ASTNode parseContents(ASTNode chameleon) {
  final CharTable table = SharedImplUtil.findCharTableByTree(chameleon);
  final FileElement treeElement =
      new DummyHolder(((TreeElement) chameleon).getManager(), null, table).getTreeElement();
  final PsiFile file = (PsiFile) TreeUtil.getFileElement((TreeElement) chameleon).getPsi();
  PsiFile originalFile = file.getOriginalFile();

  final TemplateLanguageFileViewProvider viewProvider =
      (TemplateLanguageFileViewProvider) originalFile.getViewProvider();
  final Language language = getTemplateFileLanguage(viewProvider);
  final CharSequence chars = chameleon.getChars();

  final PsiFile templateFile = createTemplateFile(file, language, chars, viewProvider);
  final TreeElement parsed = ((PsiFileImpl) templateFile).calcTreeElement();

  Lexer langLexer =
      LanguageParserDefinitions.INSTANCE.forLanguage(language).createLexer(file.getProject());
  final Lexer lexer =
      new MergingLexerAdapter(
          new TemplateBlackAndWhiteLexer(
              createBaseLexer(viewProvider), langLexer, myTemplateElementType, myOuterElementType),
          TokenSet.create(myTemplateElementType, myOuterElementType));
  lexer.start(chars);
  insertOuters(parsed, lexer, table);

  if (parsed != null) {
    final TreeElement element = parsed.getFirstChildNode();
    if (element != null) {
      ((CompositeElement) parsed).rawRemoveAllChildren();
      treeElement.rawAddChildren(element);
    }
  }
  treeElement.subtreeChanged();

  TreeElement childNode = treeElement.getFirstChildNode();

  DebugUtil.checkTreeStructure(parsed);
  DebugUtil.checkTreeStructure(treeElement);
  DebugUtil.checkTreeStructure(chameleon);
  DebugUtil.checkTreeStructure(file.getNode());
  DebugUtil.checkTreeStructure(originalFile.getNode());
  return childNode;
}
private void checkPsiIsCorrect(final FileViewProvider key) {
  PsiFile actualPsi = key.getPsi(key.getBaseLanguage());

  PsiTreeDebugBuilder treeDebugBuilder =
      new PsiTreeDebugBuilder().setShowErrorElements(false).setShowWhiteSpaces(false);
  String actualPsiTree = treeDebugBuilder.psiToString(actualPsi);

  String fileName = key.getVirtualFile().getName();
  PsiFile psi =
      PsiFileFactory.getInstance(myProject)
          .createFileFromText(
              fileName,
              FileTypeManager.getInstance().getFileTypeByFileName(fileName),
              actualPsi.getNode().getText(),
              LocalTimeCounter.currentTime(),
              false);

  if (actualPsi.getClass().equals(psi.getClass())) {
    String expectedPsi = treeDebugBuilder.psiToString(psi);

    if (!expectedPsi.equals(actualPsiTree)) {
      myReformatElements.clear();
      assert expectedPsi.equals(actualPsiTree)
          : "Refactored psi should be the same as result of parsing";
    }
  }
}
public static void assertInvalidRanges(
    final int startOffset, final int newEndOffset, FormattingDocumentModel model, String message) {
  @NonNls final StringBuilder buffer = new StringBuilder();
  buffer.append("Invalid formatting blocks:").append(message).append("\n");
  buffer.append("Start offset:");
  buffer.append(startOffset);
  buffer.append(" end offset:");
  buffer.append(newEndOffset);
  buffer.append("\n");

  int minOffset = Math.max(Math.min(startOffset, newEndOffset) - 20, 0);
  int maxOffset = Math.min(Math.max(startOffset, newEndOffset) + 20, model.getTextLength());
  buffer
      .append("Affected text fragment:[")
      .append(minOffset)
      .append(",")
      .append(maxOffset)
      .append("] - '")
      .append(model.getText(new TextRange(minOffset, maxOffset)))
      .append("'\n");

  final StringBuilder messageBuffer = new StringBuilder();
  messageBuffer.append("Invalid ranges during formatting");
  if (model instanceof FormattingDocumentModelImpl) {
    messageBuffer
        .append(" in ")
        .append(((FormattingDocumentModelImpl) model).getFile().getLanguage());
  }

  buffer.append("File text:(").append(model.getTextLength()).append(")\n'");
  buffer.append(model.getText(new TextRange(0, model.getTextLength())).toString());
  buffer.append("'\n");
  buffer.append("model (").append(model.getClass()).append("): ").append(model);

  Throwable currentThrowable = new Throwable();
  if (model instanceof FormattingDocumentModelImpl) {
    final FormattingDocumentModelImpl modelImpl = (FormattingDocumentModelImpl) model;
    buffer.append("Psi Tree:\n");
    final PsiFile file = modelImpl.getFile();
    final List<PsiFile> roots = file.getViewProvider().getAllFiles();
    for (PsiFile root : roots) {
      buffer.append("Root ");
      DebugUtil.treeToBuffer(buffer, root.getNode(), 0, false, true, true, true);
    }
    buffer.append('\n');
    currentThrowable = makeLanguageStackTrace(currentThrowable, file);
  }

  LogMessageEx.error(LOG, messageBuffer.toString(), currentThrowable, buffer.toString());
}
@NotNull
private static XmlFileHeader calcXmlFileHeader(final PsiFile file) {
  //  if (file.getFileType() == XmlFileType.INSTANCE) {
  //    VirtualFile virtualFile = file.getVirtualFile();
  //    if (virtualFile instanceof VirtualFileWithId) {
  //      ObjectStubTree tree = StubTreeLoader.getInstance().readFromVFile(file.getProject(), virtualFile);
  //      if (tree != null) {
  //        return ((FileStub) tree.getRoot()).getHeader();
  //      }
  //    }
  //  }
  if (file instanceof XmlFile && file.getNode().isParsed()) {
    final XmlDocument document = ((XmlFile) file).getDocument();
    if (document != null) {
      String publicId = null;
      String systemId = null;
      final XmlProlog prolog = document.getProlog();
      if (prolog != null) {
        final XmlDoctype doctype = prolog.getDoctype();
        if (doctype != null) {
          publicId = doctype.getPublicId();
          systemId = doctype.getSystemId();
          if (systemId == null) {
            systemId = doctype.getDtdUri();
          }
        }
      }

      final XmlTag tag = document.getRootTag();
      if (tag != null) {
        String localName = tag.getLocalName();
        if (StringUtil.isNotEmpty(localName)) {
          if (tag.getPrevSibling() instanceof PsiErrorElement) {
            return XmlFileHeader.EMPTY;
          }
          String psiNs = tag.getNamespace();
          return new XmlFileHeader(
              localName,
              psiNs == XmlUtil.EMPTY_URI || Comparing.equal(psiNs, systemId) ? null : psiNs,
              publicId,
              systemId);
        }
      }
    }
    return XmlFileHeader.EMPTY;
  }

  if (!file.isValid()) return XmlFileHeader.EMPTY;
  return NanoXmlUtil.parseHeader(file);
}
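// Hypothetical usage sketch (not part of the original source): the header computed above lets a
// caller identify a file by its root tag without forcing a full parse. The helper name "isPomLike"
// and the call site are assumptions for illustration only.
private static boolean isPomLike(@NotNull PsiFile file) {
  XmlFileHeader header = calcXmlFileHeader(file);
  return "project".equals(header.getRootTagLocalName());
}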
@NotNull
private static Map<PsiElement, FoldingDescriptor> buildRanges(
    @NotNull Editor editor, @NotNull PsiFile psiFile) {
  final FoldingBuilder foldingBuilder =
      LanguageFolding.INSTANCE.forLanguage(psiFile.getLanguage());
  final ASTNode node = psiFile.getNode();
  if (node == null) return Collections.emptyMap();

  final FoldingDescriptor[] descriptors =
      LanguageFolding.buildFoldingDescriptors(foldingBuilder, psiFile, editor.getDocument(), true);
  Map<PsiElement, FoldingDescriptor> ranges = new HashMap<PsiElement, FoldingDescriptor>();
  for (FoldingDescriptor descriptor : descriptors) {
    final ASTNode ast = descriptor.getElement();
    final PsiElement psi = ast.getPsi();
    if (psi != null) {
      ranges.put(psi, descriptor);
    }
  }
  return ranges;
}
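// Hypothetical usage sketch (not part of the original source): look up the folding descriptor
// collected above for a given PSI element and read its folded text range. "foldedRangeFor" is an
// assumed helper name for illustration.
@Nullable
private static TextRange foldedRangeFor(
    @NotNull Editor editor, @NotNull PsiFile psiFile, @NotNull PsiElement element) {
  FoldingDescriptor descriptor = buildRanges(editor, psiFile).get(element);
  return descriptor == null ? null : descriptor.getRange();
}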
@NotNull
static PsiElement loadTree(@NotNull PsiElement host, @NotNull PsiFile containingFile) {
  if (containingFile instanceof DummyHolder) {
    PsiElement context = containingFile.getContext();
    if (context != null) {
      PsiFile topFile = context.getContainingFile();
      topFile.getNode(); // load tree
      TextRange textRange =
          host.getTextRange().shiftRight(context.getTextRange().getStartOffset());
      PsiElement inLoadedTree =
          PsiTreeUtil.findElementOfClassAtRange(
              topFile, textRange.getStartOffset(), textRange.getEndOffset(), host.getClass());
      if (inLoadedTree != null) {
        host = inLoadedTree;
      }
    }
  }
  return host;
}
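// Hypothetical usage sketch (not part of the original source): when an element lives in a
// DummyHolder fragment, resolve it into the physical tree first so that its text range is
// expressed in top-level file coordinates. The helper name is an assumption for illustration.
static TextRange physicalRange(@NotNull PsiElement host) {
  PsiElement loaded = loadTree(host, host.getContainingFile());
  return loaded.getTextRange();
}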
private static Collection<String> suggestKeywords(PsiElement position) {
  TextRange posRange = position.getTextRange();
  BnfFile posFile = (BnfFile) position.getContainingFile();
  BnfRule statement = PsiTreeUtil.getTopmostParentOfType(position, BnfRule.class);
  final TextRange range;
  if (statement != null) {
    range = new TextRange(statement.getTextRange().getStartOffset(), posRange.getStartOffset());
  } else {
    int offset = posRange.getStartOffset();
    for (PsiElement cur = GrammarUtil.getDummyAwarePrevSibling(position);
        cur != null;
        cur = GrammarUtil.getDummyAwarePrevSibling(cur)) {
      if (cur instanceof BnfAttrs) offset = cur.getTextRange().getEndOffset();
      else if (cur instanceof BnfRule) offset = cur.getTextRange().getStartOffset();
      else continue;
      break;
    }
    range = new TextRange(offset, posRange.getStartOffset());
  }

  final String text =
      range.isEmpty()
          ? CompletionInitializationContext.DUMMY_IDENTIFIER
          : range.substring(posFile.getText());

  PsiFile file =
      PsiFileFactory.getInstance(posFile.getProject())
          .createFileFromText("a.bnf", BnfLanguage.INSTANCE, text, true, false);
  int completionOffset = posRange.getStartOffset() - range.getStartOffset();
  GeneratedParserUtilBase.CompletionState state =
      new GeneratedParserUtilBase.CompletionState(completionOffset) {
        @Override
        public String convertItem(Object o) {
          // we do not have other keywords
          return o instanceof String ? (String) o : null;
        }
      };
  file.putUserData(GeneratedParserUtilBase.COMPLETION_STATE_KEY, state);
  TreeUtil.ensureParsed(file.getNode());
  return state.items;
}
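// Hypothetical completion-contributor sketch (not part of the original source): the keywords
// collected above could be turned into lookup items. "result" and the surrounding contributor
// class are assumptions for illustration.
private static void addKeywordVariants(
    @NotNull PsiElement position, @NotNull CompletionResultSet result) {
  for (String keyword : suggestKeywords(position)) {
    result.addElement(LookupElementBuilder.create(keyword).bold());
  }
}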
private void doSync(
    @NotNull final PsiTreeChangeEvent event, boolean force, @NotNull final DocSyncAction syncAction) {
  if (!toProcessPsiEvent()) return;
  final PsiFile psiFile = event.getFile();
  if (psiFile == null || psiFile.getNode() == null) return;

  final DocumentEx document = (DocumentEx) myPsiDocumentManager.getCachedDocument(psiFile);
  if (document == null || document instanceof DocumentWindow) return;
  if (!force && getTransaction(document) == null) {
    return;
  }

  TextBlock textBlock = TextBlock.get(psiFile);
  if (!textBlock.isEmpty()) {
    throw new IllegalStateException("Attempt to modify PSI for non-committed Document!");
  }

  textBlock.performAtomically(
      new Runnable() {
        @Override
        public void run() {
          syncAction.syncDocument(document, (PsiTreeChangeEventImpl) event);
        }
      });

  final boolean insideTransaction = myTransactionsMap.containsKey(document);
  if (!insideTransaction) {
    document.setModificationStamp(psiFile.getViewProvider().getModificationStamp());
    if (LOG.isDebugEnabled()) {
      PsiDocumentManagerBase.checkConsistency(psiFile, document);
    }
  }

  psiFile.getViewProvider().contentsSynchronized();
}
public static void cacheSingleFile(Project project, VirtualFile openFile) {
  if (openFile.exists()) {
    // cache attributes
    PsiManager psiManager = PsiManager.getInstance(project);
    if (psiManager == null) return;
    PsiFile psiFile = psiManager.findFile(openFile);
    if (psiFile == null) return;
    FileASTNode astNode = psiFile.getNode();
    if (astNode == null) return;
    HashSet<String> rs = findAllVariables(astNode.getChildren(null), PerlTypes.VARIABLE, false);
    if (rs == null) return;
    for (String str : rs) {
      addCachedVariables(null, str);
    }

    // cache subs
    ArrayList<Package> packages = ModulesContainer.getPackageListFromFile(openFile.getPath());
    for (int i = 0; i < packages.size(); i++) {
      ArrayList<Sub> subs = packages.get(i).getAllSubs();
      for (int j = 0; j < subs.size(); j++) {
        addCachedSub(null, subs.get(j));
      }
    }
  }
}
public static boolean checkConsistency(PsiFile psiFile, Document document) {
  // todo hack
  if (psiFile.getVirtualFile() == null) return true;

  CharSequence editorText = document.getCharsSequence();
  int documentLength = document.getTextLength();
  if (psiFile.textMatches(editorText)) {
    LOG.assertTrue(psiFile.getTextLength() == documentLength);
    return true;
  }

  char[] fileText = psiFile.textToCharArray();
  @SuppressWarnings({"NonConstantStringShouldBeStringBuffer"})
  @NonNls
  String error =
      "File '"
          + psiFile.getName()
          + "' text mismatch after reparse. "
          + "File length="
          + fileText.length
          + "; Doc length="
          + documentLength
          + "\n";
  int i = 0;
  for (; i < documentLength; i++) {
    if (i >= fileText.length) {
      error += "editorText.length > psiText.length i=" + i + "\n";
      break;
    }
    if (i >= editorText.length()) {
      error += "psiText.length > editorText.length i=" + i + "\n";
      break;
    }
    if (editorText.charAt(i) != fileText[i]) {
      error += "first unequal char i=" + i + "\n";
      break;
    }
  }
  //  error += "*********************************************" + "\n";
  //  if (i <= 500){
  //    error += "Equal part:" + editorText.subSequence(0, i) + "\n";
  //  }
  //  else{
  //    error += "Equal part start:\n" + editorText.subSequence(0, 200) + "\n";
  //    error += "................................................" + "\n";
  //    error += "................................................" + "\n";
  //    error += "................................................" + "\n";
  //    error += "Equal part end:\n" + editorText.subSequence(i - 200, i) + "\n";
  //  }
  error += "*********************************************" + "\n";
  error += "Editor Text tail:(" + (documentLength - i) + ")\n";
  // + editorText.subSequence(i, Math.min(i + 300, documentLength)) + "\n";
  error += "*********************************************" + "\n";
  error += "Psi Text tail:(" + (fileText.length - i) + ")\n";
  error += "*********************************************" + "\n";

  if (document instanceof DocumentWindow) {
    error += "doc: '" + document.getText() + "'\n";
    error += "psi: '" + psiFile.getText() + "'\n";
    error += "ast: '" + psiFile.getNode().getText() + "'\n";
    error += psiFile.getLanguage() + "\n";
    PsiElement context =
        InjectedLanguageManager.getInstance(psiFile.getProject()).getInjectionHost(psiFile);
    if (context != null) {
      error += "context: " + context + "; text: '" + context.getText() + "'\n";
      error += "context file: " + context.getContainingFile() + "\n";
    }
    error +=
        "document window ranges: "
            + Arrays.asList(((DocumentWindow) document).getHostRanges())
            + "\n";
  }
  LOG.error(error);

  // document.replaceString(0, documentLength, psiFile.getText());
  return false;
}
private static TextRange preprocess(@NotNull final ASTNode node, @NotNull TextRange range) {
  TextRange result = range;
  PsiElement psi = node.getPsi();
  if (!psi.isValid()) {
    for (PreFormatProcessor processor : Extensions.getExtensions(PreFormatProcessor.EP_NAME)) {
      result = processor.process(node, result);
    }
    return result;
  }

  PsiFile file = psi.getContainingFile();

  // We use a set here because we encountered a situation when more than one PSI leaf points to
  // the same injected fragment (at least for sql injected into sql).
  final LinkedHashSet<TextRange> injectedFileRangesSet = ContainerUtilRt.newLinkedHashSet();

  if (!psi.getProject().isDefault()) {
    List<DocumentWindow> injectedDocuments = InjectedLanguageUtil.getCachedInjectedDocuments(file);
    if (!injectedDocuments.isEmpty()) {
      for (DocumentWindow injectedDocument : injectedDocuments) {
        injectedFileRangesSet.add(
            TextRange.from(injectedDocument.injectedToHost(0), injectedDocument.getTextLength()));
      }
    } else {
      Collection<PsiLanguageInjectionHost> injectionHosts = collectInjectionHosts(file, range);
      PsiLanguageInjectionHost.InjectedPsiVisitor visitor =
          new PsiLanguageInjectionHost.InjectedPsiVisitor() {
            @Override
            public void visit(
                @NotNull PsiFile injectedPsi, @NotNull List<PsiLanguageInjectionHost.Shred> places) {
              for (PsiLanguageInjectionHost.Shred place : places) {
                Segment rangeMarker = place.getHostRangeMarker();
                injectedFileRangesSet.add(
                    TextRange.create(rangeMarker.getStartOffset(), rangeMarker.getEndOffset()));
              }
            }
          };
      for (PsiLanguageInjectionHost host : injectionHosts) {
        InjectedLanguageUtil.enumerate(host, visitor);
      }
    }
  }

  if (!injectedFileRangesSet.isEmpty()) {
    List<TextRange> ranges = ContainerUtilRt.newArrayList(injectedFileRangesSet);
    Collections.reverse(ranges);
    for (TextRange injectedFileRange : ranges) {
      int startHostOffset = injectedFileRange.getStartOffset();
      int endHostOffset = injectedFileRange.getEndOffset();
      if (startHostOffset >= range.getStartOffset() && endHostOffset <= range.getEndOffset()) {
        PsiFile injected = InjectedLanguageUtil.findInjectedPsiNoCommit(file, startHostOffset);
        if (injected != null) {
          // Map the requested range into injected-file coordinates (offsets are non-negative).
          int startInjectedOffset =
              range.getStartOffset() > startHostOffset ? range.getStartOffset() - startHostOffset : 0;
          int endInjectedOffset = injected.getTextLength();
          if (range.getEndOffset() < endHostOffset) {
            endInjectedOffset -= endHostOffset - range.getEndOffset();
          }

          final TextRange initialInjectedRange =
              TextRange.create(startInjectedOffset, endInjectedOffset);
          TextRange injectedRange = initialInjectedRange;
          for (PreFormatProcessor processor : Extensions.getExtensions(PreFormatProcessor.EP_NAME)) {
            injectedRange = processor.process(injected.getNode(), injectedRange);
          }

          // Allow only range expansion (not reduction) for injected context.
          if ((initialInjectedRange.getStartOffset() > injectedRange.getStartOffset()
                  && initialInjectedRange.getStartOffset() > 0)
              || (initialInjectedRange.getEndOffset() < injectedRange.getEndOffset()
                  && initialInjectedRange.getEndOffset() < injected.getTextLength())) {
            range =
                TextRange.create(
                    range.getStartOffset()
                        + injectedRange.getStartOffset()
                        - initialInjectedRange.getStartOffset(),
                    range.getEndOffset()
                        + injectedRange.getEndOffset()
                        - initialInjectedRange.getEndOffset());
          }
        }
      }
    }
  }

  for (PreFormatProcessor processor : Extensions.getExtensions(PreFormatProcessor.EP_NAME)) {
    result = processor.process(node, result);
  }
  return result;
}
public void processText(
    PsiFile file, final FormatTextRanges ranges, boolean doPostponedFormatting) {
  final Project project = file.getProject();
  Document document = PsiDocumentManager.getInstance(project).getDocument(file);
  final List<FormatTextRanges.FormatTextRange> textRanges = ranges.getRanges();
  if (document instanceof DocumentWindow) {
    file = InjectedLanguageManager.getInstance(file.getProject()).getTopLevelFile(file);
    final DocumentWindow documentWindow = (DocumentWindow) document;
    for (FormatTextRanges.FormatTextRange range : textRanges) {
      range.setTextRange(documentWindow.injectedToHost(range.getTextRange()));
    }
    document = documentWindow.getDelegate();
  }

  final FormattingModelBuilder builder = LanguageFormatting.INSTANCE.forContext(file);
  if (builder != null) {
    if (file.getTextLength() > 0) {
      try {
        final PsiElement startElement =
            file.findElementAt(textRanges.get(0).getTextRange().getStartOffset());
        final PsiElement endElement =
            file.findElementAt(
                textRanges.get(textRanges.size() - 1).getTextRange().getEndOffset() - 1);
        final PsiElement commonParent =
            startElement != null && endElement != null
                ? PsiTreeUtil.findCommonParent(startElement, endElement)
                : null;
        ASTNode node = null;
        if (commonParent != null) {
          node = commonParent.getNode();
        }
        if (node == null) {
          node = file.getNode();
        }
        for (FormatTextRanges.FormatTextRange range : ranges.getRanges()) {
          TextRange rangeToUse = preprocess(node, range.getTextRange());
          range.setTextRange(rangeToUse);
        }
        if (doPostponedFormatting) {
          RangeMarker[] markers = new RangeMarker[textRanges.size()];
          int i = 0;
          for (FormatTextRanges.FormatTextRange range : textRanges) {
            TextRange textRange = range.getTextRange();
            int start = textRange.getStartOffset();
            int end = textRange.getEndOffset();
            if (start >= 0 && end > start && end <= document.getTextLength()) {
              markers[i] = document.createRangeMarker(textRange);
              markers[i].setGreedyToLeft(true);
              markers[i].setGreedyToRight(true);
              i++;
            }
          }
          final PostprocessReformattingAspect component =
              file.getProject().getComponent(PostprocessReformattingAspect.class);
          FormattingProgressTask.FORMATTING_CANCELLED_FLAG.set(false);
          component.doPostponedFormatting(file.getViewProvider());
          i = 0;
          for (FormatTextRanges.FormatTextRange range : textRanges) {
            RangeMarker marker = markers[i];
            if (marker != null) {
              range.setTextRange(TextRange.create(marker));
              marker.dispose();
            }
            i++;
          }
        }
        if (FormattingProgressTask.FORMATTING_CANCELLED_FLAG.get()) {
          return;
        }

        final FormattingModel originalModel =
            CoreFormatterUtil.buildModel(builder, file, mySettings, FormattingMode.REFORMAT);
        final FormattingModel model =
            new DocumentBasedFormattingModel(
                originalModel.getRootBlock(), document, project, mySettings, file.getFileType(), file);

        FormatterEx formatter = FormatterEx.getInstanceEx();
        if (CodeStyleManager.getInstance(project).isSequentialProcessingAllowed()) {
          formatter.setProgressTask(new FormattingProgressTask(project, file, document));
        }

        CommonCodeStyleSettings.IndentOptions indentOptions = null;
        if (builder instanceof FormattingModelBuilderEx) {
          indentOptions =
              ((FormattingModelBuilderEx) builder).getIndentOptionsToUse(file, ranges, mySettings);
        }
        if (indentOptions == null) {
          indentOptions = mySettings.getIndentOptions(file.getFileType());
        }

        formatter.format(model, mySettings, indentOptions, ranges);
        for (FormatTextRanges.FormatTextRange range : textRanges) {
          TextRange textRange = range.getTextRange();
          wrapLongLinesIfNecessary(
              file, document, textRange.getStartOffset(), textRange.getEndOffset());
        }
      } catch (IncorrectOperationException e) {
        LOG.error(e);
      }
    }
  }
}
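// Minimal sketch of the greedy range-marker pattern used above (not part of the original source;
// the helper name and the Runnable parameter are assumptions): pin a TextRange to the document so
// it tracks edits performed by postponed formatting, then read the adjusted range back and
// release the marker.
private static TextRange trackThroughEdits(Document document, TextRange range, Runnable edits) {
  RangeMarker marker = document.createRangeMarker(range);
  marker.setGreedyToLeft(true);
  marker.setGreedyToRight(true);
  try {
    edits.run();
    return marker.isValid() ? TextRange.create(marker) : range;
  } finally {
    marker.dispose();
  }
}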