@Override
public Item item(final QueryContext qc, final InputInfo ii) throws QueryException {
  checkCreate(qc);
  final Path path = toPath(0, qc);
  final B64 archive = toB64(exprs[1], qc, false);
  final TokenSet hs = entries(2, qc);

  try(ArchiveIn in = ArchiveIn.get(archive.input(info), info)) {
    while(in.more()) {
      final ZipEntry ze = in.entry();
      final String name = ze.getName();
      // extract all entries, or only those that were requested by the caller
      if(hs == null || hs.delete(token(name)) != 0) {
        final Path file = path.resolve(name);
        if(ze.isDirectory()) {
          Files.createDirectories(file);
        } else {
          Files.createDirectories(file.getParent());
          Files.write(file, in.read());
        }
      }
    }
  } catch(final IOException ex) {
    throw ARCH_FAIL_X.get(info, ex);
  }
  return null;
}
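// Hedged example (not part of the original sources): the filter above relies on
// TokenSet.delete(), which returns the slot id of the removed key or 0 if the key
// was absent, so a successful delete means "this entry was requested and has not
// been handled yet". Minimal sketch of that idiom; the entry name is hypothetical,
// and the import paths assume current BaseX packages.

import static org.basex.util.Token.token;

import org.basex.util.hash.TokenSet;

final class EntryFilterSketch {
  public static void main(final String[] args) {
    final TokenSet requested = new TokenSet();
    requested.add(token("docs/readme.txt"));

    // name reported by the current archive entry
    final String name = "docs/readme.txt";
    // membership test that also consumes the name, so a duplicate entry is skipped
    if(requested.delete(token(name)) != 0) {
      System.out.println("extract " + name);
    }
  }
}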
/**
 * Extracts entries from the archive.
 * @param ctx query context
 * @return contents of the extracted entries
 * @throws QueryException query exception
 */
private TokenList extract(final QueryContext ctx) throws QueryException {
  final B64 archive = (B64) checkType(checkItem(expr[0], ctx), AtomType.B64);

  TokenSet hs = null;
  if(expr.length > 1) {
    // filter result to the specified entry names
    hs = new TokenSet();
    final Iter names = ctx.iter(expr[1]);
    for(Item en; (en = names.next()) != null;) hs.add(checkElmStr(en).string(info));
  }

  final TokenList tl = new TokenList();
  final ArchiveIn in = ArchiveIn.get(archive.input(info), info);
  try {
    while(in.more()) {
      final ZipEntry ze = in.entry();
      if(ze.isDirectory()) continue;
      // return all entries, or only those contained in the filter set
      if(hs == null || hs.delete(token(ze.getName())) != 0) tl.add(in.read());
    }
  } catch(final IOException ex) {
    Util.debug(ex);
    ARCH_FAIL.thrw(info, ex);
  } finally {
    in.close();
  }
  return tl;
}
@Override
public byte[] info(final MainOptions options) {
  final TokenBuilder tb = new TokenBuilder();
  tb.add(LI_STRUCTURE).add(HASH).add(NL);
  tb.add(LI_NAMES).add(data.meta.names(type)).add(NL);

  final IndexStats stats = new IndexStats(options.get(MainOptions.MAXSTAT));
  final int s = values.size();
  for(int p = 1; p <= s; p++) {
    final int oc = lenList.get(p);
    if(oc > 0 && stats.adding(oc)) stats.add(values.key(p), oc);
  }
  stats.print(tb);
  return tb.finish();
}
@Override
public IndexIterator iter(final IndexToken token) {
  final int id = values.id(token.get());
  if(id == 0) return IndexIterator.EMPTY;

  final int len = lenList.get(id);
  final int[] ids = idsList.get(id), pres;
  if(data.meta.updindex) {
    // updatable index: map ids to pre values and sort them
    final IntList tmp = new IntList();
    for(int i = 0; i < len; ++i) tmp.add(data.pre(ids[i]));
    pres = tmp.sort().finish();
  } else {
    pres = ids;
  }

  return new IndexIterator() {
    int p;
    @Override
    public boolean more() { return p < len; }
    @Override
    public int pre() { return pres[p++]; }
    @Override
    public int size() { return len; }
  };
}
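// Hedged usage sketch (not from the sources): a typical caller drains the iterator
// through the more()/pre() contract shown above. The helper name and the MemValues
// parameter type are illustrative only.
final class IterUsageSketch {
  static int countHits(final MemValues index, final IndexToken token) {
    int hits = 0;
    final IndexIterator it = index.iter(token);
    while(it.more()) {
      it.pre();   // pre value of the next matching database node
      hits++;
    }
    return hits;
  }
}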
@Override
public void rehash() {
  super.rehash();
  final int s = size << 1;
  ids = Array.copyOf(ids, s);
  len = Arrays.copyOf(len, s);
}
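// Hedged sketch (not from the sources) of the pattern behind this and the other
// rehash() overrides below: when the parent hash structure doubles its capacity,
// every parallel array keyed by the same slot ids must grow to the same new size.
// Field names are illustrative.

import java.util.Arrays;

final class ParallelArraysSketch {
  int size = 8;                   // stands in for the parent hash capacity
  int[] payload = new int[size];  // parallel array indexed by the same slot ids

  void rehash() {
    size <<= 1;                              // parent table doubles its capacity
    payload = Arrays.copyOf(payload, size);  // parallel array must follow
  }
}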
/**
 * Constructor.
 * @param data data instance
 * @param type index type
 */
public MemValues(final Data data, final IndexType type) {
  super(data, type);
  // token index: use a separate token set; otherwise, reuse the data's value index
  values = type == IndexType.TOKEN ? new TokenSet()
                                   : ((MemData) data).values(type == IndexType.TEXT);
  final int s = values.size() + 1;
  idsList = new ArrayList<>(s);
  lenList = new IntList(s);
  reorder = new BoolList(s);
}
/**
 * Removes values from the index.
 * @param key key
 * @param vals sorted values
 */
void delete(final byte[] key, final int... vals) {
  final int id = values.id(key), vl = vals.length, l = lenList.get(id), s = l - vl;
  final int[] ids = idsList.get(id);
  // single merge pass: keep all ids that are not contained in vals
  for(int i = 0, n = 0, v = 0; i < l; i++) {
    if(v == vl || ids[i] != vals[v]) ids[n++] = ids[i];
    else v++;
  }
  lenList.set(id, s);
  if(s == 0) idsList.set(id, null);
}
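// Hedged walk-through (not from the sources) of the removal loop in delete():
// both 'ids' and 'vals' are sorted, so one merge pass drops the matching entries.
// The concrete numbers are made up for illustration.

import java.util.Arrays;

final class DeleteSketch {
  public static void main(final String[] args) {
    final int[] ids = { 1, 4, 6, 9 };  // current ids stored for the key
    final int[] vals = { 4, 9 };       // sorted ids to remove
    final int l = ids.length, vl = vals.length;

    for(int i = 0, n = 0, v = 0; i < l; i++) {
      if(v == vl || ids[i] != vals[v]) ids[n++] = ids[i];
      else v++;
    }
    // remaining length is l - vl; the trailing slots are stale
    System.out.println(Arrays.toString(Arrays.copyOf(ids, l - vl)));  // [1, 6]
  }
}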
/**
 * Adds values to the index.
 * @param key key to be indexed
 * @param vals sorted values
 */
void add(final byte[] key, final int... vals) {
  // token index: add the key; otherwise, reference the existing key
  final int id = type == IndexType.TOKEN ? values.put(key) : values.id(key),
      vl = vals.length;

  // if required, resize the id and length lists
  while(idsList.size() < id + 1) idsList.add(null);
  if(lenList.size() < id + 1) lenList.set(id, 0);

  final int len = lenList.get(id), size = len + vl;
  int[] ids = idsList.get(id);
  if(ids == null) {
    ids = vals;
  } else {
    // grow the existing array and append the new, sorted values
    if(ids.length < size) ids = Arrays.copyOf(ids, Array.newSize(size));
    System.arraycopy(vals, 0, ids, len, vl);
    // remember that the combined list must be sorted before it is accessed
    if(ids[len - 1] > vals[0]) {
      if(reorder == null) reorder = new BoolList(values.size());
      reorder.set(id, true);
    }
  }
  idsList.set(id, ids);
  lenList.set(id, size);
}
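// Hedged walk-through (not from the sources) of the append step in add(): the new
// run is copied behind the existing ids, and if the last old id is greater than the
// first new id, the combined list is flagged for a later sort. Numbers are made up.

import java.util.Arrays;

final class AddSketch {
  public static void main(final String[] args) {
    int[] ids = { 3, 7 };          // existing ids for the key
    final int[] vals = { 5, 9 };   // new, sorted ids to append
    final int len = ids.length, size = len + vals.length;

    if(ids.length < size) ids = Arrays.copyOf(ids, size);
    System.arraycopy(vals, 0, ids, len, vals.length);

    // 7 > 5: the combined list [3, 7, 5, 9] is no longer sorted
    final boolean reorder = ids[len - 1] > vals[0];
    System.out.println(Arrays.toString(ids) + ", reorder = " + reorder);
  }
}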
/**
 * Caches and returns all unique tokens specified in a query.
 * @param list token list
 * @return token set
 */
private TokenSet unique(final TokenList list) {
  // cache all query tokens in a set (duplicates are removed)
  final TokenSet ts = new TokenSet();
  switch(mode) {
    case ALL:
    case ANY:
      for(final byte[] t : list) ts.add(t);
      break;
    case ALL_WORDS:
    case ANY_WORD:
      final FTLexer l = new FTLexer(ftt.opt);
      for(final byte[] t : list) {
        l.init(t);
        while(l.hasNext()) ts.add(l.nextToken());
      }
      break;
    case PHRASE:
      final TokenBuilder tb = new TokenBuilder();
      for(final byte[] t : list) tb.add(t).add(' ');
      ts.add(tb.trim().finish());
  }
  return ts;
}
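// Hedged illustration (not from the sources) of what the three branches above
// produce for the query tokens ["foo bar", "baz"], using plain JDK collections
// instead of TokenSet/FTLexer/TokenBuilder.

import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

final class UniqueModesSketch {
  public static void main(final String[] args) {
    final List<String> list = List.of("foo bar", "baz");

    // ALL / ANY: every query token is kept as-is
    final Set<String> all = new LinkedHashSet<>(list);                    // {foo bar, baz}

    // ALL_WORDS / ANY_WORD: tokens are split into single words first
    final Set<String> words = new LinkedHashSet<>();
    for(final String t : list) words.addAll(Arrays.asList(t.split(" "))); // {foo, bar, baz}

    // PHRASE: all tokens are joined into one phrase
    final Set<String> phrase = Set.of(String.join(" ", list));            // {foo bar baz}

    System.out.println(all + " " + words + " " + phrase);
  }
}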
@Override
protected void rehash() {
  super.rehash();
  funcs = Arrays.copyOf(funcs, size << 1);
}
@Override
public int costs(final IndexToken it) {
  return lenList.get(values.id(it.get()));
}
@Override
protected void rehash() {
  super.rehash();
  values = Arrays.copyOf(values, size << 1);
}
public class PsiBuilderQuickTest extends LightPlatformTestCase {
  private static final IFileElementType ROOT = new IFileElementType("ROOT", Language.ANY);

  private static final IElementType LETTER = new IElementType("LETTER", Language.ANY);
  private static final IElementType DIGIT = new IElementType("DIGIT", Language.ANY);
  private static final IElementType OTHER = new IElementType("OTHER", Language.ANY);
  private static final IElementType COLLAPSED = new IElementType("COLLAPSED", Language.ANY);
  private static final IElementType LEFT_BOUND = new IElementType("LEFT_BOUND", Language.ANY) {
    @Override
    public boolean isLeftBound() {
      return true;
    }
  };
  private static final IElementType COMMENT = new IElementType("COMMENT", Language.ANY);

  private static final TokenSet WHITESPACE_SET = TokenSet.create(TokenType.WHITE_SPACE);
  private static final TokenSet COMMENT_SET = TokenSet.create(COMMENT);

  @SuppressWarnings("JUnitTestCaseWithNonTrivialConstructors")
  public PsiBuilderQuickTest() {
    PlatformTestCase.initPlatformLangPrefix();
  }

  public void testPlain() {
    doTest("a<<b",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               while (builder.getTokenType() != null) {
                 builder.advanceLexer();
               }
             }
           },
           "Element(ROOT)\n" +
           " PsiElement(LETTER)('a')\n" +
           " PsiElement(OTHER)('<')\n" +
           " PsiElement(OTHER)('<')\n" +
           " PsiElement(LETTER)('b')\n");
  }

  public void testComposites() {
    doTest("1(a(b)c)2(d)3",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               PsiBuilderUtil.advance(builder, 1);
               final PsiBuilder.Marker marker1 = builder.mark();
               PsiBuilderUtil.advance(builder, 2);
               final PsiBuilder.Marker marker2 = builder.mark();
               PsiBuilderUtil.advance(builder, 3);
               marker2.done(OTHER);
               PsiBuilderUtil.advance(builder, 2);
               marker1.done(OTHER);
               PsiBuilderUtil.advance(builder, 1);
               final PsiBuilder.Marker marker3 = builder.mark();
               PsiBuilderUtil.advance(builder, 1);
               builder.mark().done(OTHER);
               PsiBuilderUtil.advance(builder, 2);
               marker3.done(OTHER);
               PsiBuilderUtil.advance(builder, 1);
             }
           },
           "Element(ROOT)\n" +
           " PsiElement(DIGIT)('1')\n" +
           " Element(OTHER)\n" +
           " PsiElement(OTHER)('(')\n" +
           " PsiElement(LETTER)('a')\n" +
           " Element(OTHER)\n" +
           " PsiElement(OTHER)('(')\n" +
           " PsiElement(LETTER)('b')\n" +
           " PsiElement(OTHER)(')')\n" +
           " PsiElement(LETTER)('c')\n" +
           " PsiElement(OTHER)(')')\n" +
           " PsiElement(DIGIT)('2')\n" +
           " Element(OTHER)\n" +
           " PsiElement(OTHER)('(')\n" +
           " Element(OTHER)\n" +
           " <empty list>\n" +
           " PsiElement(LETTER)('d')\n" +
           " PsiElement(OTHER)(')')\n" +
           " PsiElement(DIGIT)('3')\n");
  }

  public void testCollapse() {
    doTest("a<<>>b",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               PsiBuilderUtil.advance(builder, 1);
               final PsiBuilder.Marker marker1 = builder.mark();
               PsiBuilderUtil.advance(builder, 2);
               marker1.collapse(COLLAPSED);
               final PsiBuilder.Marker marker2 = builder.mark();
               PsiBuilderUtil.advance(builder, 2);
               marker2.collapse(COLLAPSED);
               PsiBuilderUtil.advance(builder, 1);
             }
           },
           "Element(ROOT)\n" +
           " PsiElement(LETTER)('a')\n" +
           " PsiElement(COLLAPSED)('<<')\n" +
           " PsiElement(COLLAPSED)('>>')\n" +
           " PsiElement(LETTER)('b')\n");
  }

  public void testDoneAndError() {
    doTest("a2b",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               IElementType tokenType;
               while ((tokenType = builder.getTokenType()) != null) {
                 final PsiBuilder.Marker marker = builder.mark();
                 builder.advanceLexer();
                 if (tokenType == DIGIT) marker.error("no digits allowed");
                 else marker.done(tokenType);
               }
             }
           },
           "Element(ROOT)\n" +
           " Element(LETTER)\n" +
           " PsiElement(LETTER)('a')\n" +
           " PsiErrorElement:no digits allowed\n" +
           " PsiElement(DIGIT)('2')\n" +
           " Element(LETTER)\n" +
           " PsiElement(LETTER)('b')\n");
  }

  public void testPrecedeAndDoneBefore() {
    doTest("ab",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               final PsiBuilder.Marker marker1 = builder.mark();
               builder.advanceLexer();
               final PsiBuilder.Marker marker2 = builder.mark();
               builder.advanceLexer();
               marker2.done(OTHER);
               marker2.precede().doneBefore(COLLAPSED, marker2);
               marker1.doneBefore(COLLAPSED, marker2, "with error");
             }
           },
           "Element(ROOT)\n" +
           " Element(COLLAPSED)\n" +
           " PsiElement(LETTER)('a')\n" +
           " Element(COLLAPSED)\n" +
           " <empty list>\n" +
           " PsiErrorElement:with error\n" +
           " <empty list>\n" +
           " Element(OTHER)\n" +
           " PsiElement(LETTER)('b')\n");
  }

  public void testErrorBefore() {
    doTest("a1",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               final PsiBuilder.Marker letter = builder.mark();
               builder.advanceLexer();
               letter.done(LETTER);
               final PsiBuilder.Marker digit = builder.mark();
               builder.advanceLexer();
               digit.done(DIGIT);
               digit.precede().errorBefore("something lost", digit);
             }
           },
           "Element(ROOT)\n" +
           " Element(LETTER)\n" +
           " PsiElement(LETTER)('a')\n" +
           " PsiErrorElement:something lost\n" +
           " <empty list>\n" +
           " Element(DIGIT)\n" +
           " PsiElement(DIGIT)('1')\n");
  }

  public void testValidityChecksOnDone() {
    doFailTest("a",
               new Parser() {
                 @Override
                 public void parse(PsiBuilder builder) {
                   final PsiBuilder.Marker first = builder.mark();
                   builder.advanceLexer();
                   builder.mark();
                   first.done(LETTER);
                 }
               },
               "Another not done marker added after this one. Must be done before this.");
  }

  public void testValidityChecksOnDoneBefore1() {
    doFailTest("a",
               new Parser() {
                 @Override
                 public void parse(PsiBuilder builder) {
                   final PsiBuilder.Marker first = builder.mark();
                   builder.advanceLexer();
                   final PsiBuilder.Marker second = builder.mark();
                   second.precede();
                   first.doneBefore(LETTER, second);
                 }
               },
               "Another not done marker added after this one. Must be done before this.");
  }

  public void testValidityChecksOnDoneBefore2() {
    doFailTest("a",
               new Parser() {
                 @Override
                 public void parse(PsiBuilder builder) {
                   final PsiBuilder.Marker first = builder.mark();
                   builder.advanceLexer();
                   final PsiBuilder.Marker second = builder.mark();
                   second.doneBefore(LETTER, first);
                 }
               },
               "'Before' marker precedes this one.");
  }

  public void testValidityChecksOnTreeBuild1() {
    doFailTest("aa",
               new Parser() {
                 @Override
                 public void parse(PsiBuilder builder) {
                   while (!builder.eof()) builder.advanceLexer();
                 }
               },
               "Parser produced no markers. Text:\naa");
  }

  public void testValidityChecksOnTreeBuild2() {
    doFailTest("aa",
               new Parser() {
                 @Override
                 public void parse(PsiBuilder builder) {
                   final PsiBuilder.Marker marker = builder.mark();
                   builder.advanceLexer();
                   marker.done(LETTER);
                 }
               },
               "Tokens [LETTER] were not inserted into the tree. Text:\naa");
  }

  public void testValidityChecksOnTreeBuild3() {
    doFailTest("a ",
               new Parser() {
                 @Override
                 public void parse(PsiBuilder builder) {
                   final PsiBuilder.Marker marker = builder.mark();
                   builder.advanceLexer();
                   marker.done(LETTER);
                   while (!builder.eof()) builder.advanceLexer();
                 }
               },
               "Tokens [WHITE_SPACE] are outside of root element \"LETTER\". Text:\na ");
  }

  public void testWhitespaceTrimming() {
    doTest(" a b ",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               PsiBuilder.Marker marker = builder.mark();
               builder.advanceLexer();
               marker.done(OTHER);
               marker = builder.mark();
               builder.advanceLexer();
               marker.done(OTHER);
               builder.advanceLexer();
             }
           },
           "Element(ROOT)\n" +
           " PsiWhiteSpace(' ')\n" +
           " Element(OTHER)\n" +
           " PsiElement(LETTER)('a')\n" +
           " PsiWhiteSpace(' ')\n" +
           " Element(OTHER)\n" +
           " PsiElement(LETTER)('b')\n" +
           " PsiWhiteSpace(' ')\n");
  }

  public void testWhitespaceBalancingByErrors() {
    doTest("a b c",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               PsiBuilder.Marker marker = builder.mark();
               builder.advanceLexer();
               builder.error("error 1");
               marker.done(OTHER);
               marker = builder.mark();
               builder.advanceLexer();
               builder.mark().error("error 2");
               marker.done(OTHER);
               marker = builder.mark();
               builder.advanceLexer();
               marker.error("error 3");
             }
           },
           "Element(ROOT)\n" +
           " Element(OTHER)\n" +
           " PsiElement(LETTER)('a')\n" +
           " PsiErrorElement:error 1\n" +
           " <empty list>\n" +
           " PsiWhiteSpace(' ')\n" +
           " Element(OTHER)\n" +
           " PsiElement(LETTER)('b')\n" +
           " PsiErrorElement:error 2\n" +
           " <empty list>\n" +
           " PsiWhiteSpace(' ')\n" +
           " PsiErrorElement:error 3\n" +
           " PsiElement(LETTER)('c')\n");
  }

  public void testWhitespaceBalancingByEmptyComposites() {
    doTest("a b c",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               PsiBuilder.Marker marker = builder.mark();
               builder.advanceLexer();
               builder.mark().done(OTHER);
               marker.done(OTHER);
               marker = builder.mark();
               builder.advanceLexer();
               builder.mark().done(LEFT_BOUND);
               marker.done(OTHER);
               builder.advanceLexer();
             }
           },
           "Element(ROOT)\n" +
           " Element(OTHER)\n" +
           " PsiElement(LETTER)('a')\n" +
           " PsiWhiteSpace(' ')\n" +
           " Element(OTHER)\n" +
           " <empty list>\n" +
           " Element(OTHER)\n" +
           " PsiElement(LETTER)('b')\n" +
           " Element(LEFT_BOUND)\n" +
           " <empty list>\n" +
           " PsiWhiteSpace(' ')\n" +
           " PsiElement(LETTER)('c')\n");
  }

  public void testCustomEdgeProcessors() {
    final WhitespacesAndCommentsBinder leftEdgeProcessor = new WhitespacesAndCommentsBinder() {
      @Override
      public int getEdgePosition(List<IElementType> tokens, boolean atStreamEdge, TokenTextGetter getter) {
        int pos = tokens.size() - 1;
        while (tokens.get(pos) != COMMENT && pos > 0) pos--;
        return pos;
      }
    };
    final WhitespacesAndCommentsBinder rightEdgeProcessor = new WhitespacesAndCommentsBinder() {
      @Override
      public int getEdgePosition(List<IElementType> tokens, boolean atStreamEdge, TokenTextGetter getter) {
        int pos = 0;
        while (tokens.get(pos) != COMMENT && pos < tokens.size() - 1) pos++;
        return pos + 1;
      }
    };

    doTest("{ # i # }",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               while (builder.getTokenType() != LETTER) builder.advanceLexer();
               final PsiBuilder.Marker marker = builder.mark();
               builder.advanceLexer();
               marker.done(OTHER);
               marker.setCustomEdgeTokenBinders(leftEdgeProcessor, rightEdgeProcessor);
               while (builder.getTokenType() != null) builder.advanceLexer();
             }
           },
           "Element(ROOT)\n" +
           " PsiElement(OTHER)('{')\n" +
           " PsiWhiteSpace(' ')\n" +
           " Element(OTHER)\n" +
           " PsiElement(COMMENT)('#')\n" +
           " PsiWhiteSpace(' ')\n" +
           " PsiElement(LETTER)('i')\n" +
           " PsiWhiteSpace(' ')\n" +
           " PsiElement(COMMENT)('#')\n" +
           " PsiWhiteSpace(' ')\n" +
           " PsiElement(OTHER)('}')\n");
  }

  private abstract static class MyLazyElementType extends ILazyParseableElementType
      implements ILightLazyParseableElementType {
    protected MyLazyElementType(@NonNls String debugName) {
      super(debugName, Language.ANY);
    }
  }

  public void testLightChameleon() {
    final IElementType CHAMELEON_2 = new MyChameleon2Type();
    final IElementType CHAMELEON_1 = new MyChameleon1Type(CHAMELEON_2);

    doTest("ab{12[.?]}cd{x}",
           new Parser() {
             @Override
             public void parse(PsiBuilder builder) {
               PsiBuilderUtil.advance(builder, 2);
               PsiBuilder.Marker chameleon = builder.mark();
               PsiBuilderUtil.advance(builder, 8);
               chameleon.collapse(CHAMELEON_1);
               PsiBuilderUtil.advance(builder, 2);
               chameleon = builder.mark();
               PsiBuilderUtil.advance(builder, 3);
               chameleon.collapse(CHAMELEON_1);
             }
           },
           "Element(ROOT)\n" +
           " PsiElement(LETTER)('a')\n" +
           " PsiElement(LETTER)('b')\n" +
           " Element(CHAMELEON_1)\n" +
           " PsiElement(OTHER)('{')\n" +
           " PsiElement(DIGIT)('1')\n" +
           " PsiElement(DIGIT)('2')\n" +
           " Element(OTHER)\n" +
           " Element(CHAMELEON_2)\n" +
           " PsiElement(OTHER)('[')\n" +
           " PsiElement(OTHER)('.')\n" +
           " PsiErrorElement:test error 2\n" +
           " PsiElement(OTHER)('?')\n" +
           " PsiElement(OTHER)(']')\n" +
           " PsiErrorElement:test error 1\n" +
           " <empty list>\n" +
           " PsiElement(OTHER)('}')\n" +
           " PsiElement(LETTER)('c')\n" +
           " PsiElement(LETTER)('d')\n" +
           " Element(CHAMELEON_1)\n" +
           " PsiElement(OTHER)('{')\n" +
           " PsiElement(LETTER)('x')\n" +
           " PsiElement(OTHER)('}')\n");
  }

  @SuppressWarnings("ConstantConditions")
  private static PsiBuilderImpl createBuilder(CharSequence text) {
    ParserDefinition parserDefinition = new ParserDefinition() {
      @NotNull
      @Override
      public Lexer createLexer(Project project) {
        return new MyTestLexer();
      }

      @Override
      public PsiParser createParser(Project project) {
        return null;
      }

      @Override
      public IFileElementType getFileNodeType() {
        return null;
      }

      @NotNull
      @Override
      public TokenSet getWhitespaceTokens() {
        return WHITESPACE_SET;
      }

      @NotNull
      @Override
      public TokenSet getCommentTokens() {
        return COMMENT_SET;
      }

      @NotNull
      @Override
      public TokenSet getStringLiteralElements() {
        return null;
      }

      @NotNull
      @Override
      public PsiElement createElement(ASTNode node) {
        return null;
      }

      @Override
      public PsiFile createFile(FileViewProvider viewProvider) {
        return null;
      }

      @Override
      public SpaceRequirements spaceExistanceTypeBetweenTokens(ASTNode left, ASTNode right) {
        return null;
      }
    };
    return new PsiBuilderImpl(getProject(), null, parserDefinition,
                              parserDefinition.createLexer(getProject()), null, text, null, null);
  }

  private interface Parser {
    void parse(PsiBuilder builder);
  }

  private static void doTest(@NonNls final String text, final Parser parser, @NonNls final String expected) {
    final PsiBuilder builder = createBuilder(text);
    final PsiBuilder.Marker rootMarker = builder.mark();
    parser.parse(builder);
    rootMarker.done(ROOT);

    // check light tree composition
    final FlyweightCapableTreeStructure<LighterASTNode> lightTree = builder.getLightTree();
    assertEquals(expected, DebugUtil.lightTreeToString(lightTree, false));
    // verify that light tree can be taken multiple times
    final FlyweightCapableTreeStructure<LighterASTNode> lightTree2 = builder.getLightTree();
    assertEquals(expected, DebugUtil.lightTreeToString(lightTree2, false));

    // check heavy tree composition
    final ASTNode root = builder.getTreeBuilt();
    assertEquals(expected, DebugUtil.nodeTreeToString(root, false));

    // check heavy vs. light tree merging
    final PsiBuilder builder2 = createBuilder(text);
    final PsiBuilder.Marker rootMarker2 = builder2.mark();
    parser.parse(builder2);
    rootMarker2.done(ROOT);
    DiffTree.diff(new ASTStructure(root), builder2.getLightTree(),
                  new ShallowNodeComparator<ASTNode, LighterASTNode>() {
                    @Override
                    public ThreeState deepEqual(ASTNode oldNode, LighterASTNode newNode) {
                      return ThreeState.UNSURE;
                    }

                    @Override
                    public boolean typesEqual(ASTNode oldNode, LighterASTNode newNode) {
                      return true;
                    }

                    @Override
                    public boolean hashCodesEqual(ASTNode oldNode, LighterASTNode newNode) {
                      return true;
                    }
                  },
                  new DiffTreeChangeBuilder<ASTNode, LighterASTNode>() {
                    @Override
                    public void nodeReplaced(@NotNull ASTNode oldChild, @NotNull LighterASTNode newChild) {
                      fail("replaced(" + oldChild + "," + newChild.getTokenType() + ")");
                    }

                    @Override
                    public void nodeDeleted(@NotNull ASTNode oldParent, @NotNull ASTNode oldNode) {
                      fail("deleted(" + oldParent + "," + oldNode + ")");
                    }

                    @Override
                    public void nodeInserted(@NotNull ASTNode oldParent, @NotNull LighterASTNode newNode, int pos) {
                      fail("inserted(" + oldParent + "," + newNode.getTokenType() + ")");
                    }
                  });
  }

  private static void doFailTest(@NonNls final String text, final Parser parser, @NonNls final String expected) {
    final PrintStream std = System.err;
    //noinspection IOResourceOpenedButNotSafelyClosed
    System.setErr(new PrintStream(new NullStream()));
    try {
      try {
        ParserDefinition parserDefinition = new ParserDefinition() {
          @NotNull
          @Override
          public Lexer createLexer(Project project) {
            return null;
          }

          @Override
          public PsiParser createParser(Project project) {
            return null;
          }

          @Override
          public IFileElementType getFileNodeType() {
            return null;
          }

          @NotNull
          @Override
          public TokenSet getWhitespaceTokens() {
            return TokenSet.EMPTY;
          }

          @NotNull
          @Override
          public TokenSet getCommentTokens() {
            return TokenSet.EMPTY;
          }

          @NotNull
          @Override
          public TokenSet getStringLiteralElements() {
            return null;
          }

          @NotNull
          @Override
          public PsiElement createElement(ASTNode node) {
            return null;
          }

          @Override
          public PsiFile createFile(FileViewProvider viewProvider) {
            return null;
          }

          @Override
          public SpaceRequirements spaceExistanceTypeBetweenTokens(ASTNode left, ASTNode right) {
            return null;
          }
        };
        final PsiBuilder builder =
            PsiBuilderFactory.getInstance().createBuilder(parserDefinition, new MyTestLexer(), text);
        builder.setDebugMode(true);
        parser.parse(builder);
        builder.getLightTree();
        fail("should fail");
      }
      catch (AssertionError e) {
        assertEquals(expected, e.getMessage());
      }
    }
    finally {
      System.setErr(std);
    }
  }

  private static class MyTestLexer extends LexerBase {
    private CharSequence myBuffer = "";
    private int myIndex = 0;
    private int myBufferEnd = 1;

    @Override
    public void start(CharSequence buffer, int startOffset, int endOffset, int initialState) {
      myBuffer = buffer.subSequence(startOffset, endOffset);
      myIndex = 0;
      myBufferEnd = myBuffer.length();
    }

    @Override
    public int getState() {
      return 0;
    }

    @Override
    public IElementType getTokenType() {
      if (myIndex >= myBufferEnd) return null;
      else if (Character.isLetter(myBuffer.charAt(myIndex))) return LETTER;
      else if (Character.isDigit(myBuffer.charAt(myIndex))) return DIGIT;
      else if (Character.isWhitespace(myBuffer.charAt(myIndex))) return TokenType.WHITE_SPACE;
      else if (myBuffer.charAt(myIndex) == '#') return COMMENT;
      else return OTHER;
    }

    @Override
    public int getTokenStart() {
      return myIndex;
    }

    @Override
    public int getTokenEnd() {
      return myIndex + 1;
    }

    @Override
    public void advance() {
      if (myIndex < myBufferEnd) myIndex++;
    }

    @Override
    public CharSequence getBufferSequence() {
      return myBuffer;
    }

    @Override
    public int getBufferEnd() {
      return myBufferEnd;
    }
  }

  private static class NullStream extends OutputStream {
    @Override
    public void write(final int b) throws IOException {}
  }

  private static class MyChameleon1Type extends MyLazyElementType {
    private final IElementType myCHAMELEON_2;

    public MyChameleon1Type(IElementType CHAMELEON_2) {
      super("CHAMELEON_1");
      myCHAMELEON_2 = CHAMELEON_2;
    }

    @Override
    public FlyweightCapableTreeStructure<LighterASTNode> parseContents(LighterLazyParseableNode chameleon) {
      final PsiBuilder builder = createBuilder(chameleon.getText());
      parse(builder);
      return builder.getLightTree();
    }

    @Override
    public ASTNode parseContents(ASTNode chameleon) {
      final PsiBuilder builder = createBuilder(chameleon.getText());
      parse(builder);
      return builder.getTreeBuilt().getFirstChildNode();
    }

    public void parse(PsiBuilder builder) {
      final PsiBuilder.Marker root = builder.mark();
      PsiBuilder.Marker nested = null;
      while (!builder.eof()) {
        final String token = builder.getTokenText();
        if ("[".equals(token) && nested == null) {
          nested = builder.mark();
        }
        builder.advanceLexer();
        if ("]".equals(token) && nested != null) {
          nested.collapse(myCHAMELEON_2);
          nested.precede().done(OTHER);
          nested = null;
          builder.error("test error 1");
        }
      }
      if (nested != null) nested.drop();
      root.done(this);
    }
  }

  private static class MyChameleon2Type extends MyLazyElementType {
    public MyChameleon2Type() {
      super("CHAMELEON_2");
    }

    @Override
    public FlyweightCapableTreeStructure<LighterASTNode> parseContents(LighterLazyParseableNode chameleon) {
      final PsiBuilder builder = createBuilder(chameleon.getText());
      parse(builder);
      return builder.getLightTree();
    }

    @Override
    public ASTNode parseContents(ASTNode chameleon) {
      final PsiBuilder builder = createBuilder(chameleon.getText());
      parse(builder);
      return builder.getTreeBuilt().getFirstChildNode();
    }

    public void parse(PsiBuilder builder) {
      final PsiBuilder.Marker root = builder.mark();
      PsiBuilder.Marker error = null;
      while (!builder.eof()) {
        final String token = builder.getTokenText();
        if ("?".equals(token)) error = builder.mark();
        builder.advanceLexer();
        if (error != null) {
          error.error("test error 2");
          error = null;
        }
      }
      root.done(this);
    }
  }
}