@Override
public void start(@NotNull CharSequence buffer, int startOffset, int endOffset, int initialState) {
    // Restart the lexer over buffer[startOffset, endOffset). NOTE: initialState is reused as the
    // index into the pre-parsed lexer-token array (lexemeIndex), not as a DFA state number.
    this.buffer = buffer;
    this.currentOffset = this.startOffset = startOffset;
    this.endOffset = endOffset;
    lexemeIndex = initialState;
    lexerTokens = null;
    if (buffer.length() > 0) {
        // Parse the whole buffer up front; all tokens are produced in one pass.
        lexParser.parseMarkdown(buffer.toString());
        lexerTokens = lexParser.getLexerTokens();
        lexParser.clearParsed(); // release all memory from the parse, we don't need it.
    }
    lexerToken = null;

    // Prime the lexeme stream: if there is a gap before the first real token, emit a synthetic
    // whitespace token covering that gap first; otherwise consume the token at lexemeIndex.
    if (lexerTokens != null && lexerTokens.length > 0) {
        lexerToken = lexerTokens[lexemeIndex];
        if (currentOffset <= lexerToken.getRange().getStart()) {
            lexerToken = lexParser.getSkippedSpaceToken(currentOffset, lexerToken.getRange().getStart());
        } else {
            lexemeIndex++;
        }
    }

    if (lexerToken == null) {
        // Empty parse result: create a dummy whitespace token spanning the whole requested range.
        lexerToken = lexParser.getSkippedSpaceToken(currentOffset, this.endOffset);
    }

    currentOffset = lexerToken.getRange().getEnd();
    // Clamp: a token may extend past the requested end when lexing a sub-range of the buffer.
    if (currentOffset > endOffset) {
        currentOffset = endOffset;
    }
}
@Nullable @Override public IElementType getTokenType() { // return lexerToken != null && lexerToken.getRange().getStart() < endOffset ? // lexerToken.getElementType() : null; return lexerToken != null ? lexerToken.getElementType() : null; }
@Override
public void advance() {
    // Step to the next lexeme. Gaps between pre-parsed tokens (and any tail after the last one)
    // are covered with synthetic whitespace tokens so the token stream is contiguous.
    if (currentOffset < endOffset) {
        // Loop past any tokens that end before the current offset (can happen after a clamp in
        // start(), or when restarting mid-range) until a token at/after currentOffset is found.
        do {
            if (lexerTokens != null && lexemeIndex >= 0 && lexemeIndex < lexerTokens.length) {
                if (lexerToken == null || currentOffset < lexerToken.getRange().getStart()) {
                    // Fill the gap before the next real token with a whitespace token.
                    lexerToken = lexParser.getSkippedSpaceToken(
                            currentOffset, lexerTokens[lexemeIndex].getRange().getStart());
                } else {
                    lexerToken = lexerTokens[lexemeIndex];
                    if (currentOffset < lexerToken.getRange().getStart()) {
                        // Gap before this token: emit whitespace now, keep lexemeIndex so the
                        // real token is returned on the next advance().
                        lexerToken = lexParser.getSkippedSpaceToken(currentOffset, lexerToken.getRange().getStart());
                    } else {
                        lexemeIndex++;
                    }
                }
            } else {
                // Ran out of parsed tokens: cover the remaining range with whitespace, or stop.
                if (currentOffset < endOffset) {
                    lexerToken = lexParser.getSkippedSpaceToken(currentOffset, endOffset);
                } else {
                    lexerToken = null;
                }
            }
        } while (lexerToken != null && lexerToken.getRange().getEnd() < currentOffset);
        currentOffset = lexerToken == null ? endOffset : lexerToken.getRange().getEnd();
    } else {
        lexerToken = null;
    }

    // Clamp to the requested range; a token overrunning endOffset terminates the stream.
    if (currentOffset > endOffset) {
        lexerToken = null;
        currentOffset = endOffset;
    }
}
@Override public int getTokenEnd() { // return lexerToken != null && lexerToken.getRange().getEnd() <= endOffset ? // lexerToken.getRange().getEnd() : endOffset; return lexerToken != null ? lexerToken.getRange().getEnd() : endOffset; }
@Override
public int getTokenStart() {
    // When the stream is exhausted, report the end of the lexed range.
    if (lexerToken == null) {
        return endOffset;
    }
    return lexerToken.getRange().getStart();
}