  public void tokenize(SourceCode sourceCode, Tokens tokenEntries) {
    StringBuilder stringBuilder = sourceCode.getCodeBuffer();

    // Note that Java version is irrelevant for tokenizing
    LanguageVersionHandler languageVersionHandler =
        LanguageVersion.JAVA_14.getLanguageVersionHandler();
    String fileName = sourceCode.getFileName();
    // Build a token manager over the source buffer using the language's parser.
    TokenManager tokenMgr =
        languageVersionHandler
            .getParser(languageVersionHandler.getDefaultParserOptions())
            .getTokenManager(fileName, new StringReader(stringBuilder.toString()));
    // Pull tokens straight from the token manager; the EOF token has an empty
    // image, which terminates the loop below.
    Token currentToken = (Token) tokenMgr.getNextToken();

    // Tracks regions of the token stream to skip, e.g. annotations when
    // ignoreAnnotations is enabled.
    TokenDiscarder discarder = new TokenDiscarder(ignoreAnnotations);

    while (currentToken.image.length() > 0) {
      discarder.updateState(currentToken);

      // While discarding, consume tokens without recording them.
      if (discarder.isDiscarding()) {
        currentToken = (Token) tokenMgr.getNextToken();
        continue;
      }

      // Record the token as a CPD entry, then advance.
      processToken(tokenEntries, fileName, currentToken);
      currentToken = (Token) tokenMgr.getNextToken();
    }
    // Terminate the token stream with CPD's end-of-file sentinel.
    tokenEntries.add(TokenEntry.getEOF());
  }
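
  /**
   * CPD tokenizer for Groovy source: runs the file through Groovy's GroovyLexer
   * and records one TokenEntry per lexed token.
   */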
  @Override
  public final void tokenize(SourceCode source, Tokens cpdTokens) {
    String fileName = source.getFileName();

    // Try-with-resources ensures the reader is closed even if lexing fails.
    try (FileReader reader = new FileReader(new File(fileName))) {
      TokenStream tokenStream = new GroovyLexer(reader).plumb();
      Token token = tokenStream.nextToken();
      // Emit one CPD entry per token until the lexer signals end of file.
      while (token.getType() != Token.EOF_TYPE) {
        cpdTokens.add(new TokenEntry(token.getText(), fileName, token.getLine()));
        token = tokenStream.nextToken();
      }
    } catch (TokenStreamException tse) {
      LOG.error("Unexpected token when lexing file: " + fileName, tse);
    } catch (FileNotFoundException fnfe) {
      LOG.error("Could not find file: " + fileName, fnfe);
    } catch (IOException e) {
      LOG.error("Could not read file: " + fileName, e);
    }
    // The EOF sentinel is added even when lexing fails, so CPD always sees a
    // terminated token stream.
    cpdTokens.add(TokenEntry.getEOF());
  }
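
  /**
   * CPD tokenizer for JavaScript source: walks the ANTLR generated ES3Lexer over
   * the file and records one TokenEntry per token.
   */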
  @Override
  public final void tokenize(SourceCode source, Tokens cpdTokens) {
    String fileName = source.getFileName();
    Token token;
    ES3Lexer lexer;

    try {
      lexer = new ES3Lexer(new ANTLRFileStream(fileName));

      // Emit one CPD entry per token until the lexer reports end of file.
      token = lexer.nextToken();
      while (token.getType() != Token.EOF) {
        cpdTokens.add(new TokenEntry(token.getText(), fileName, token.getLine()));
        token = lexer.nextToken();
      }
    } catch (FileNotFoundException fnfe) {
      LOG.error("Could not find file: " + fileName, fnfe);
    } catch (IOException e) {
      LOG.error("Could not read file: " + fileName, e);
    }

    cpdTokens.add(TokenEntry.getEOF());
  }
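
  // Hypothetical driver sketch, assuming the PMD CPD classes SourceCode,
  // SourceCode.StringCodeLoader, Tokens and TokenEntry from net.sourceforge.pmd.cpd,
  // plus a Tokenizer implementation using one of the tokenize() methods above.
  // It illustrates the shared contract: one TokenEntry per token, terminated by the
  // EOF sentinel even when lexing fails. The string-backed loader suits the Java
  // tokenizer, which reads the code buffer; the Groovy and JavaScript tokenizers
  // open the file named by getFileName(), so they would need a file-backed SourceCode.
  public static void runTokenizerDemo(Tokenizer tokenizer) throws IOException {
    SourceCode sourceCode =
        new SourceCode(new SourceCode.StringCodeLoader("int a = 1;\nint b = 1;\n"));
    Tokens tokens = new Tokens();

    tokenizer.tokenize(sourceCode, tokens);

    // The last entry is always TokenEntry.getEOF(), so the list is never empty.
    System.out.println("Collected " + tokens.getTokens().size() + " token entries");
  }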