protected boolean tokenEndsAtEndOfLine(TokenSourceWithStateV4<TState> lexer, Token token) {
    CharStream charStream = lexer.getInputStream();
    if (charStream != null) {
      int nextCharIndex = token.getStopIndex() + 1;
      if (nextCharIndex >= charStream.size()) {
        return true;
      }

      // Look at the single character that immediately follows the token.
      char c = charStream.getText(Interval.of(nextCharIndex, nextCharIndex)).charAt(0);
      return c == '\r' || c == '\n';
    }

    // No backing CharStream is available; fall back to examining the snapshot text directly.
    if (token.getStopIndex() + 1 >= snapshot.length()) {
      return true;
    }

    char c = snapshot.charAt(token.getStopIndex() + 1);
    return c == '\r' || c == '\n';

    /*int line = snapshot.findLineNumber(token.getStopIndex() + 1);
    int lineStart = snapshot.findLineFromOffset(line).getStart().getOffset();
    int nextLineStart = snapshot.findLineFromOffset(line + 1).getStart().getOffset();
    int lineEnd = nextLineStart - 1;
    if (lineEnd > 0 && lineEnd > lineStart) {
        char c = snapshot.charAt(lineEnd - 1);
        if (c == '\r' || c == '\n') {
            lineEnd--;
        }
    }

    return lineEnd <= token.getStopIndex() + 1 && nextLineStart >= token.getStopIndex() + 1;*/
  }
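
For reference, the core check can be extracted into a self-contained helper that depends only on the standard ANTLR 4 runtime types (CharStream, Token, Interval) rather than the editor snapshot classes used above. This is a minimal sketch, not part of the original class.

  // Minimal sketch: true if nothing follows the token, or if the next character
  // in the stream is a CR or LF. Uses only the ANTLR 4 runtime API.
  static boolean endsAtEndOfLine(CharStream input, Token token) {
    int next = token.getStopIndex() + 1;
    if (next >= input.size()) {
      return true;
    }
    char c = input.getText(Interval.of(next, next)).charAt(0);
    return c == '\r' || c == '\n';
  }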
  protected CharStream createInputStream(OffsetRegion span) throws BadLocationException {
    CharStream input;
    if (span.getLength() > 1000) {
      // Large region: restrict the stream to the requested span.
      input = new DocumentSnapshotCharStream(snapshot, span);
    } else {
      // Small region: use a stream over the full snapshot.
      input = new DocumentSnapshotCharStream(snapshot);
    }

    input.seek(span.getStart());
    return input;
  }
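
A hypothetical caller might tokenize just the requested region as sketched below. GrammarLexer is a placeholder name for whichever generated ANTLR 4 lexer the tagger actually uses, and span is the OffsetRegion passed to createInputStream above; this is a fragment under those assumptions, not the original caller.

  // Hypothetical caller sketch: lex only the damaged region of the snapshot.
  CharStream input = createInputStream(span);            // already positioned at span.getStart()
  GrammarLexer lexer = new GrammarLexer(input);          // placeholder generated lexer
  int spanEnd = span.getStart() + span.getLength();
  for (Token t = lexer.nextToken();
       t.getType() != Token.EOF && t.getStartIndex() < spanEnd;
       t = lexer.nextToken()) {
    // hand each token to the highlighter / tagger here
  }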
Example No. 3
  public List<String> getTokenTypes(LexerGrammar lg, ATN atn, CharStream input) {
    LexerATNSimulator interp =
        new LexerATNSimulator(
            atn, new DFA[] {new DFA(atn.modeToStartState.get(Lexer.DEFAULT_MODE))}, null);
    List<String> tokenTypes = new ArrayList<String>();
    int ttype;
    boolean hitEOF = false;
    do {
      if (hitEOF) {
        // A previous match began at end of input; record a final EOF entry and stop.
        tokenTypes.add("EOF");
        break;
      }
      // Peek at the next character before matching so we know whether this match
      // begins at end of input.
      int t = input.LA(1);
      ttype = interp.match(input, Lexer.DEFAULT_MODE);
      if (ttype == Token.EOF) {
        tokenTypes.add("EOF");
      } else {
        tokenTypes.add(lg.typeToTokenList.get(ttype));
      }

      if (t == IntStream.EOF) {
        hitEOF = true;
      }
    } while (ttype != Token.EOF);
    return tokenTypes;
  }
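
The loop above drives the LexerATNSimulator directly against the grammar's ATN. The same "collect symbolic token names until EOF" pattern can also be written against the high-level runtime Lexer API; the sketch below assumes an ANTLR 4.7+ runtime and uses MyLexer as a placeholder for any generated lexer class.

  // Sketch: collect symbolic token names, appending "EOF" last.
  CharStream input = CharStreams.fromString("a b c");
  MyLexer lexer = new MyLexer(input);                    // placeholder generated lexer
  List<String> names = new ArrayList<>();
  Token t;
  do {
    t = lexer.nextToken();
    names.add(t.getType() == Token.EOF
        ? "EOF"
        : lexer.getVocabulary().getSymbolicName(t.getType()));
  } while (t.getType() != Token.EOF);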
Example No. 4
  public static Interval getSourceInterval(@NonNull ParserRuleContext context) {
    Parameters.notNull("context", context);
    int startIndex = context.start.getStartIndex();
    Token stopSymbol = getStopSymbol(context);
    if (stopSymbol == null) {
      // No stop token is available: return an empty interval anchored at the start index.
      return new Interval(startIndex, startIndex - 1);
    }

    int stopIndex;
    if (stopSymbol.getType() != Token.EOF) {
      stopIndex = stopSymbol.getStopIndex();
    } else {
      // The rule ends at EOF: extend the interval to the last character of the input
      // if the character stream is available, otherwise fall back to an empty interval.
      TokenSource tokenSource = context.getStart().getTokenSource();
      CharStream inputStream = tokenSource != null ? tokenSource.getInputStream() : null;
      if (inputStream != null) {
        stopIndex = inputStream.size() - 1;
      } else {
        stopIndex = startIndex - 1;
      }
    }

    stopIndex = Math.max(stopIndex, startIndex - 1);
    return new Interval(startIndex, stopIndex);
  }
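
A typical use of getSourceInterval is recovering the exact source text covered by a parse-tree node. In the hedged sketch below, parser and its compilationUnit() entry rule are placeholders for a generated parser; only the runtime calls (getStart, getInputStream, getText, Interval.length) are standard ANTLR 4 API.

  // Hypothetical usage: map a rule context back to the characters it covers.
  ParserRuleContext tree = parser.compilationUnit();      // placeholder entry rule
  Interval bounds = getSourceInterval(tree);
  String covered = bounds.length() > 0
      ? tree.getStart().getInputStream().getText(bounds)
      : "";                                               // empty interval: no tokens matched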