/**
 * Turns a token stream into a string, makes for easy debugging of token errors.
 *
 * <p>Each token is rendered as {@code |text -> TYPE_NAME}. The lexer's stream is
 * consumed until it emits {@code Token.EOF}.
 *
 * @param lexer the lexer to drain; its remaining tokens are consumed by this call
 * @return a single string listing every token's text and type name
 */
  public static String tokensToString(final Lexer lexer) {
    final StringBuilder build = new StringBuilder();
    Token t;
    final String[] names = lexer.getTokenNames();
    while ((t = lexer.nextToken()) != null) {
      build.append("|");
      build.append(t.getText());
      build.append(" -> ");
      // Check EOF first: Token.EOF is negative, so it must never reach the name lookup.
      if (t.getType() == Token.EOF) {
        build.append("EOF");
        break;
      } else if (t.getType() >= 0 && t.getType() < names.length) {
        build.append(names[t.getType()]);
      } else {
        // Unknown or out-of-range type. Previously a non-negative type >= names.length
        // would have thrown ArrayIndexOutOfBoundsException; now it renders as "???".
        build.append("???");
      }
    }

    return build.toString();
  }
// Example #2
  /**
   * Reads the given file and lexes its contents into a list of tokens.
   *
   * @param file the file to tokenize
   * @param lexer the lexer used to tokenize the file's contents; its input stream is replaced
   * @return the tokens produced by the lexer, or {@code null} if the file could not be opened
   */
  private static List<Token> loadTokens(final File file, final Lexer lexer) {
    // BUG FIX: the original referenced an undefined variable "filename"; use the
    // "file" parameter. NOTE(review): assumes Read takes a path string — if it
    // accepts a File directly, pass "file" instead; confirm against Read's API.
    final Read reader = new Read(file.getPath());
    if (!reader.isOpen()) {
      return null;
    }

    try {
      lexer.setInputStream(reader.getInput());
      final CommonTokenStream tokenStream = new CommonTokenStream(lexer);
      tokenStream.fill();
      return tokenStream.getTokens();
    } finally {
      // Release the reader even if lexing throws; the original leaked it on failure.
      reader.close();
    }
  }
// Example #3
 /**
  * {@inheritDoc}
  *
  * <p>Custom actions are implemented by calling {@link Lexer#action} with the appropriate rule and
  * action indexes.
  *
  * @param lexer the lexer on which to execute this custom action
  */
 @Override
 public void execute(@NotNull Lexer lexer) {
   // ruleIndex and actionIndex are fields of the enclosing class (not visible here).
   // NOTE(review): the first argument (the context parameter of Lexer#action) is passed
   // as null — presumably it is unused for custom lexer actions; confirm against the
   // ANTLR runtime before relying on it.
   lexer.action(null, ruleIndex, actionIndex);
 }