@Override
public void checkConfiguration(final Issues issues) {
    super.checkConfiguration(issues);
    if (getOptions().isBacktrackLexer() && getOptions().isIgnoreCase()) {
        issues.addError("Backtracking lexer and ignorecase cannot be combined for now.");
    }
}
@Override
public void generate(final Grammar grammar, final XpandExecutionContext ctx) {
    KeywordHelper helper = new KeywordHelper(grammar, getOptions().isIgnoreCase());
    super.generate(grammar, ctx);
    final String srcGenPath = ctx.getOutput().getOutlet(Generator.SRC_GEN).getPath();
    final String encoding = getEncoding(ctx, Generator.SRC_GEN);
    final String lexerBaseFileName = srcGenPath + "/"
            + getFragmentHelper().getLexerGrammarFileName(grammar).replace('.', '/');
    String libPath = lexerBaseFileName;
    libPath = libPath.substring(0, libPath.lastIndexOf('/'));
    String absoluteLexerFileName = lexerBaseFileName + ".g";
    String absoluteParserFileName = srcGenPath + "/"
            + getFragmentHelper().getParserGrammarFileName(grammar).replace('.', '/') + ".g";
    // Force ANTLR output into the parser grammar's directory.
    addAntlrParam("-fo");
    addAntlrParam(absoluteParserFileName
            .substring(0, absoluteParserFileName.lastIndexOf('/'))
            .replace("//", "/"));
    // The lexer reuses the same parameters, but with its own directory as the output location.
    String[] lexerAntlrParams = getAntlrParams();
    lexerAntlrParams[lexerAntlrParams.length - 1] = absoluteLexerFileName
            .substring(0, absoluteLexerFileName.lastIndexOf('/'))
            .replace("//", "/");
    // Back up the generated lexer grammar as *.gxtext, then replace it with the version
    // kept in the 'java' source folder before invoking ANTLR on it.
    copy(new File(absoluteLexerFileName), new File(lexerBaseFileName + ".gxtext"));
    writeFile(absoluteLexerFileName, readFile(absoluteLexerFileName.replace("src-gen", "java")));
    getAntlrTool().runWithEncodingAndParams(
            absoluteLexerFileName.replace("//", "/"), encoding, lexerAntlrParams);
    cleanupLexerTokensFile(lexerBaseFileName);
    // The parser grammar needs the lexer's directory on the ANTLR library path.
    addAntlrParam("-lib");
    addAntlrParam(libPath.replace("//", "/"));
    getAntlrTool().runWithEncodingAndParams(
            absoluteParserFileName.replace("//", "/"), encoding, getAntlrParams());
    simplifyUnorderedGroupPredicatesIfRequired(grammar, absoluteParserFileName);
    splitParserAndLexerIfEnabled(absoluteLexerFileName, absoluteParserFileName);
    suppressWarnings(absoluteLexerFileName, absoluteParserFileName);
    // Derive the parser's token definitions from the lexer's: keyword rules and quoted
    // literals are written back as ANTLR string literals.
    MutableTokenDefProvider provider = createLexerTokensProvider(lexerBaseFileName);
    for (Map.Entry<Integer, String> entry : provider.getTokenDefMap().entrySet()) {
        String value = entry.getValue();
        if (helper.isKeywordRule(value)) {
            String keywordAsAntlrString = AntlrGrammarGenUtil.toAntlrString(helper.getKeywordValue(value));
            entry.setValue("'" + keywordAsAntlrString + "'");
        } else if (value.startsWith("'")) {
            value = AntlrGrammarGenUtil.toAntlrString(value);
            entry.setValue("'" + value + "'");
        }
    }
    // Write the adjusted token definitions next to the parser grammar.
    try {
        provider.writeTokenFile(new PrintWriter(new File(srcGenPath + "/"
                + getFragmentHelper().getParserGrammarFileName(grammar).replace('.', '/') + ".tokens")));
    } catch (IOException e) {
        throw new RuntimeException(e);
    }
    helper.discardHelper(grammar);
}
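
// What follows is an illustrative sketch only, not part of the fragment above: it mirrors
// the keyword remapping in generate() on a plain map so the effect on the token definitions
// is visible. The rule name "Package", its keyword value "package", and the helper map are
// made-up stand-ins; the real code asks KeywordHelper.isKeywordRule(..) / getKeywordValue(..)
// and escapes the value with AntlrGrammarGenUtil.toAntlrString(..).
private static java.util.Map<Integer, String> illustrateTokenRemapping() {
    java.util.Map<Integer, String> tokenDefs = new java.util.LinkedHashMap<Integer, String>();
    tokenDefs.put(4, "RULE_ID");   // terminal rule: left untouched
    tokenDefs.put(5, "Package");   // stands in for a keyword rule name
    tokenDefs.put(6, "'{'");       // quoted literal: the real code only re-escapes it
    java.util.Map<String, String> keywordValues =
            java.util.Collections.singletonMap("Package", "package");
    for (java.util.Map.Entry<Integer, String> entry : tokenDefs.entrySet()) {
        String value = entry.getValue();
        if (keywordValues.containsKey(value)) {
            entry.setValue("'" + keywordValues.get(value) + "'"); // e.g. Package -> 'package'
        }
    }
    // tokenDefs now maps 4 -> RULE_ID, 5 -> 'package', 6 -> '{'; keyword tokens end up as
    // quoted literals, which is the shape written into the parser's .tokens file.
    return tokenDefs;
}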