/** Create DOM builder using JAXP libraries. */
static DocumentBuilder makeBuilder(boolean validate) throws IOException, SAXException {
  DocumentBuilder builder;
  DocumentBuilderFactory factory;

  // create factory according to javax.xml.parsers.DocumentBuilderFactory property
  // or platform default (i.e. com.sun...)
  try {
    factory = DocumentBuilderFactory.newInstance();
    factory.setValidating(validate);
    factory.setNamespaceAware(false);
  } catch (FactoryConfigurationError err) {
    notifyFactoryErr(err, "javax.xml.parsers.DocumentBuilderFactory"); // NOI18N
    throw err;
  }

  try {
    builder = factory.newDocumentBuilder();
  } catch (ParserConfigurationException ex) {
    SAXException sex = new SAXException("Configuration exception."); // NOI18N
    ErrorManager emgr = ErrorManager.getDefault();
    emgr.annotate(sex, ex);
    emgr.annotate(
        sex,
        "Can not create a DOM builder!\nCheck javax.xml.parsers.DocumentBuilderFactory property and the builder library presence on classpath."); // NOI18N
    throw sex;
  }

  return builder;
}
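// Usage sketch (illustrative, not part of the original sources): how a caller might obtain a
// non-validating builder and parse a document. The enclosing utility class name "XMLUtil" and
// the input file "sample.xml" are assumptions for illustration only.
import java.io.File;
import java.io.IOException;
import javax.xml.parsers.DocumentBuilder;
import org.w3c.dom.Document;
import org.xml.sax.SAXException;

class MakeBuilderExample {
  static Document parseSample() throws IOException, SAXException {
    DocumentBuilder builder = XMLUtil.makeBuilder(false); // false = no DTD validation
    return builder.parse(new File("sample.xml"));
  }
}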
public void rawDefineTemplate(String name, CompiledST code, Token defT) {
  CompiledST prev = rawGetTemplate(name);
  if (prev != null) {
    if (!prev.isRegion) {
      errMgr.compileTimeError(ErrorType.TEMPLATE_REDEFINITION, null, defT);
      return;
    }
    if (prev.isRegion) {
      if (code.regionDefType != ST.RegionType.IMPLICIT
          && prev.regionDefType == ST.RegionType.EMBEDDED) {
        errMgr.compileTimeError(
            ErrorType.EMBEDDED_REGION_REDEFINITION, null, defT, getUnMangledTemplateName(name));
        return;
      } else if (code.regionDefType == ST.RegionType.IMPLICIT
          || prev.regionDefType == ST.RegionType.EXPLICIT) {
        errMgr.compileTimeError(
            ErrorType.REGION_REDEFINITION, null, defT, getUnMangledTemplateName(name));
        return;
      }
    }
  }
  code.nativeGroup = this;
  code.templateDefStartToken = defT;
  templates.put(name, code);
}
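// Usage sketch (illustrative, not from the original sources): a duplicate template definition
// in a group reaches rawDefineTemplate() above and is reported as ErrorType.TEMPLATE_REDEFINITION
// through the group's error manager.
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;

class RedefinitionExample {
  public static void main(String[] args) {
    STGroup g =
        new STGroupString(
            "dup() ::= \"first\"\n"
                + "dup() ::= \"second\"\n");
    g.getInstanceOf("dup"); // forces the group to load; the redefinition error is reported here
  }
}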
/** Annotate & notify the exception. */
private static void notifyNewSAXParserEx(Exception ex) {
  ErrorManager emgr = ErrorManager.getDefault();
  emgr.annotate(
      ex,
      "Can not create a SAX parser!\nCheck javax.xml.parsers.SAXParserFactory property features and the parser library presence on classpath."); // NOI18N
  emgr.notify(ex);
}
/** Annotate & notify the error. */
private static void notifyFactoryErr(Error err, String property) {
  ErrorManager emgr = ErrorManager.getDefault();
  emgr.annotate(
      err,
      "Can not create a factory!\nCheck "
          + property
          + " property and the factory library presence on classpath."); // NOI18N
  emgr.notify(err);
}
public String setOption(
    Map<String, Object> options, Set<String> legalOptions, Grammar grammar, String key, Object value) {
  if (!legalOptions.contains(key)) {
    ErrorManager.grammarError(ErrorManager.MSG_ILLEGAL_OPTION, grammar, token, key);
    return null;
  }
  if (value instanceof String) {
    String vs = (String) value;
    if (vs.charAt(0) == '"') {
      value = vs.substring(1, vs.length() - 1); // strip quotes
    }
  }
  if (key.equals("k")) {
    grammar.numberOfManualLookaheadOptions++;
  }
  if (key.equals("backtrack") && value.toString().equals("true")) {
    grammar.composite.getRootGrammar().atLeastOneBacktrackOption = true;
  }
  options.put(key, value);
  return key;
}
public void sortGrammarFiles() throws IOException {
  // System.out.println("Grammar names "+getGrammarFileNames());
  Graph g = new Graph();
  List<String> missingFiles = new ArrayList<String>();
  for (String gfile : grammarFileNames) {
    try {
      GrammarSpelunker grammar = new GrammarSpelunker(inputDirectory, gfile);
      grammar.parse();
      String vocabName = grammar.getTokenVocab();
      String grammarName = grammar.getGrammarName();
      // Make all grammars depend on any tokenVocab options
      if (vocabName != null) g.addEdge(gfile, vocabName + CodeGenerator.VOCAB_FILE_EXTENSION);
      // Make all generated tokens files depend on their grammars
      g.addEdge(grammarName + CodeGenerator.VOCAB_FILE_EXTENSION, gfile);
    } catch (FileNotFoundException fnfe) {
      ErrorManager.error(ErrorManager.MSG_CANNOT_OPEN_FILE, gfile);
      missingFiles.add(gfile);
    }
  }
  List<Object> sorted = g.sort();
  // System.out.println("sorted="+sorted);
  grammarFileNames.clear(); // wipe so we can give new ordered list
  for (int i = 0; i < sorted.size(); i++) {
    String f = (String) sorted.get(i);
    if (missingFiles.contains(f)) continue;
    if (!(f.endsWith(".g") || f.endsWith(".g3"))) continue;
    grammarFileNames.add(f);
  }
  // System.out.println("new grammars="+grammarFileNames);
}
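// Sketch of the dependency idea above, with illustrative file names: if A.g sets tokenVocab=B,
// then A.g depends on B.tokens, and B.tokens depends on B.g, so B.g should be processed first.
// The Graph helper and its package path (assumed org.antlr.misc) are the same one used by
// sortGrammarFiles(); the expected ordering in the comment is an assumption of this sketch.
import java.util.List;
import org.antlr.misc.Graph;

class GrammarSortExample {
  public static void main(String[] args) {
    Graph g = new Graph();
    g.addEdge("A.g", "B.tokens"); // A.g uses tokenVocab=B
    g.addEdge("B.tokens", "B.g"); // B.tokens is generated from B.g
    List sorted = g.sort(); // dependencies should come out ahead of the grammars that need them
    System.out.println(sorted); // expected: [B.g, B.tokens, A.g]
  }
}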
/**
 * A list of dependency generators that are accumulated as (and if) the tool is required to sort
 * the provided grammars into build dependency order.
 */
// protected Map<String, BuildDependencyGenerator> buildDependencyGenerators;

public static void main(String[] args) {
  Tool antlr = new Tool(args);
  if (!exitNow) {
    antlr.process();
    if (ErrorManager.getNumErrors() > 0) {
      System.exit(1);
    }
    System.exit(0);
  }
}
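// Usage sketch (illustrative): driving the Tool from code rather than via main(). The grammar
// file "T.g" and the output directory are assumed example values.
import org.antlr.Tool;

class EmbeddedAntlrExample {
  public static void main(String[] args) {
    Tool antlr = new Tool(new String[] {"-o", "build/gen", "T.g"});
    antlr.process();
    System.out.println("ANTLR reported " + antlr.getNumErrors() + " error(s)");
  }
}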
/** Make name and alias for target. Replace any previous def of name */
public CompiledST defineTemplateAlias(Token aliasT, Token targetT) {
  String alias = aliasT.getText();
  String target = targetT.getText();
  CompiledST targetCode = rawGetTemplate(target);
  if (targetCode == null) {
    errMgr.compileTimeError(ErrorType.ALIAS_TARGET_UNDEFINED, null, aliasT, alias, target);
    return null;
  }
  templates.put(alias, targetCode);
  return targetCode;
}
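// Usage sketch (illustrative, not from the original sources): "b ::= a" in a group is the alias
// syntax handled by defineTemplateAlias() above; b resolves to the same compiled template as a.
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;

class AliasExample {
  public static void main(String[] args) {
    STGroup g =
        new STGroupString(
            "a(x) ::= \"<x>!\"\n"
                + "b ::= a\n");
    System.out.println(g.getInstanceOf("b").add("x", "hi").render()); // prints: hi!
  }
}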
protected ST getEmbeddedInstanceOf(
    Interpreter interp, ST enclosingInstance, int ip, String name) {
  if (verbose) System.out.println("getEmbeddedInstanceOf(" + name + ")");
  ST st = getInstanceOf(name);
  if (st == null) {
    errMgr.runTimeError(interp, enclosingInstance, ip, ErrorType.NO_SUCH_TEMPLATE, name);
    return createStringTemplateInternally(new CompiledST());
  }
  // this is only called internally. whack any debug ST create events
  if (trackCreationEvents) {
    st.debugState.newSTEvent = null; // toss it out
  }
  return st;
}
/** Load template stream into this group */
public CompiledST loadTemplateFile(String prefix, String fileName, CharStream templateStream) {
  GroupLexer lexer = new GroupLexer(templateStream);
  CommonTokenStream tokens = new CommonTokenStream(lexer);
  GroupParser parser = new GroupParser(tokens);
  parser.group = this;
  lexer.group = this;
  try {
    parser.templateDef(prefix);
  } catch (RecognitionException re) {
    errMgr.groupSyntaxError(ErrorType.SYNTAX_ERROR, fileName, re, re.getMessage());
  }
  String templateName = Misc.getFileNameNoSuffix(fileName);
  if (prefix != null && prefix.length() > 0) templateName = prefix + "/" + templateName;
  return rawGetTemplate(templateName);
}
/** Load a group file with full path fileName; it's relative to root by prefix. */
public void loadGroupFile(String prefix, String fileName) {
  // System.out.println("load group file prefix="+prefix+", fileName="+fileName);
  GroupParser parser = null;
  try {
    URL f = new URL(fileName);
    ANTLRInputStream fs = new ANTLRInputStream(f.openStream(), encoding);
    GroupLexer lexer = new GroupLexer(fs);
    fs.name = fileName;
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    parser = new GroupParser(tokens);
    parser.group(this, prefix);
  } catch (Exception e) {
    errMgr.IOError(null, ErrorType.CANT_LOAD_GROUP_FILE, e, fileName);
  }
}
protected void generateNFAs(Grammar g) {
  DOTGenerator dotGenerator = new DOTGenerator(g);
  Collection rules = g.getAllImportedRules();
  rules.addAll(g.getRules());
  for (Iterator itr = rules.iterator(); itr.hasNext(); ) {
    Rule r = (Rule) itr.next();
    try {
      String dot = dotGenerator.getDOT(r.startState);
      if (dot != null) {
        writeDOTFile(g, r, dot);
      }
    } catch (IOException ioe) {
      ErrorManager.error(ErrorManager.MSG_CANNOT_WRITE_FILE, ioe);
    }
  }
}
public void generateDFAs(Grammar g) {
  for (int d = 1; d <= g.getNumberOfDecisions(); d++) {
    DFA dfa = g.getLookaheadDFA(d);
    if (dfa == null) {
      continue; // not there for some reason, ignore
    }
    DOTGenerator dotGenerator = new DOTGenerator(g);
    String dot = dotGenerator.getDOT(dfa.startState);
    String dotFileName = g.name + "." + "dec-" + d;
    if (g.implicitLexer) {
      dotFileName = g.name + Grammar.grammarTypeToFileNameSuffix[g.type] + "." + "dec-" + d;
    }
    try {
      writeDOTFile(g, dotFileName, dot);
    } catch (IOException ioe) {
      ErrorManager.error(ErrorManager.MSG_CANNOT_GEN_DOT_FILE, dotFileName, ioe);
    }
  }
}
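// Usage sketch (illustrative): the -nfa/-dfa flags ask the Tool to call generateNFAs() and
// generateDFAs() and write DOT files alongside the other generated output. "T.g" is an assumed
// grammar file name.
import org.antlr.Tool;

class DotOutputExample {
  public static void main(String[] args) {
    Tool antlr = new Tool();
    antlr.setGenerate_NFA_dot(true); // one .dot file per rule
    antlr.setGenerate_DFA_dot(true); // one .dot file per decision
    antlr.addGrammarFile("T.g");
    antlr.process();
  }
}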
private static void Xhelp() {
  ErrorManager.info("ANTLR Parser Generator Version " + new Tool().VERSION);
  System.err.println("  -Xgrtree                  print the grammar AST");
  System.err.println("  -Xdfa                     print DFA as text ");
  System.err.println("  -Xnoprune                 test lookahead against EBNF block exit branches");
  System.err.println("  -Xnocollapse              collapse incident edges into DFA states");
  System.err.println("  -Xdbgconversion           dump lots of info during NFA conversion");
  System.err.println("  -Xmultithreaded           run the analysis in 2 threads");
  System.err.println("  -Xnomergestopstates       do not merge stop states");
  System.err.println("  -Xdfaverbose              generate DFA states in DOT with NFA configs");
  System.err.println("  -Xwatchconversion         print a message for each NFA before converting");
  System.err.println("  -XdbgST                   put tags at start/stop of all templates in output");
  System.err.println("  -Xnfastates               for nondeterminisms, list NFA states for each path");
  System.err.println(
      "  -Xm m                     max number of rule invocations during conversion ["
          + NFAContext.MAX_SAME_RULE_INVOCATIONS_PER_NFA_CONFIG_STACK
          + "]");
  System.err.println(
      "  -Xmaxdfaedges m           max \"comfortable\" number of edges for single DFA state ["
          + DFA.MAX_STATE_TRANSITIONS_FOR_TABLE
          + "]");
  System.err.println(
      "  -Xconversiontimeout t     set NFA conversion timeout (ms) for each decision ["
          + DFA.MAX_TIME_PER_DFA_CREATION
          + "]");
  System.err.println(
      "  -Xmaxinlinedfastates m    max DFA states before table used rather than inlining ["
          + CodeGenerator.MADSI_DEFAULT
          + "]");
  System.err.println(
      "  -Xmaxswitchcaselabels m   don't generate switch() statements for dfas bigger than m ["
          + CodeGenerator.MSCL_DEFAULT
          + "]");
  System.err.println(
      "  -Xminswitchalts m         don't generate switch() statements for dfas smaller than m ["
          + CodeGenerator.MSA_DEFAULT
          + "]");
}
private static void help() {
  ErrorManager.info("ANTLR Parser Generator Version " + new Tool().VERSION);
  System.err.println("usage: java org.antlr.Tool [args] file.g [file2.g file3.g ...]");
  System.err.println("  -o outputDir          specify output directory where all output is generated");
  System.err.println("  -fo outputDir         same as -o but force even files with relative paths to dir");
  System.err.println("  -lib dir              specify location of token files");
  System.err.println("  -depend               generate file dependencies");
  System.err.println("  -report               print out a report about the grammar(s) processed");
  System.err.println("  -print                print out the grammar without actions");
  System.err.println("  -debug                generate a parser that emits debugging events");
  System.err.println("  -profile              generate a parser that computes profiling information");
  System.err.println("  -nfa                  generate an NFA for each rule");
  System.err.println("  -dfa                  generate a DFA for each decision point");
  System.err.println("  -message-format name  specify output style for messages");
  System.err.println("  -verbose              generate ANTLR version and other information");
  System.err.println("  -make                 only build if generated files older than grammar");
  System.err.println("  -version              print the version of ANTLR and exit.");
  System.err.println("  -X                    display extended argument list");
}
public CompiledST defineRegion(
    String enclosingTemplateName, Token regionT, String template, Token templateToken) {
  String name = regionT.getText();
  template = Misc.trimOneStartingNewline(template);
  template = Misc.trimOneTrailingNewline(template);
  CompiledST code = compile(getFileName(), enclosingTemplateName, null, template, templateToken);
  String mangled = getMangledRegionName(enclosingTemplateName, name);

  if (lookupTemplate(mangled) == null) {
    errMgr.compileTimeError(
        ErrorType.NO_SUCH_REGION, templateToken, regionT, enclosingTemplateName, name);
    return new CompiledST();
  }
  code.name = mangled;
  code.isRegion = true;
  code.regionDefType = ST.RegionType.EXPLICIT;
  code.templateDefStartToken = regionT;

  rawDefineTemplate(mangled, code, regionT);
  code.defineArgDefaultValueTemplates(this);
  code.defineImplicitlyDefinedTemplates(this);

  return code;
}
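// Usage sketch (illustrative, not from the original sources): "@a.r() ::= ..." in a group
// overrides region r of template a; defineRegion() above checks that the region exists in the
// enclosing template and marks the override EXPLICIT.
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupString;

class RegionExample {
  public static void main(String[] args) {
    STGroup g =
        new STGroupString(
            "a() ::= \"[<@r()>]\"\n"
                + "@a.r() ::= \"body\"\n");
    System.out.println(g.getInstanceOf("a").render()); // prints: [body]
  }
}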
private static void version() {
  ErrorManager.info("ANTLR Parser Generator Version " + new Tool().VERSION);
}
/**
 * Import template files, directories, and group files. Priority is given to templates defined in
 * the current group; this, in effect, provides inheritance. Polymorphism is in effect so that if
 * an inherited template references template t() then we search for t() in the subgroup first.
 *
 * <p>If you specify an absolute file name or directory name, the import statement uses that
 * directly. If it is not an absolute path, we look that entity up in the directory holding the
 * group that initiates the import. If file or directory is not in that directory, then we load
 * using the classpath.
 *
 * <p>Templates are loaded on-demand from import dirs. Imported groups are loaded on-demand when
 * searching for a template.
 *
 * <p>The listener of this group is passed to the import group so errors found while loading
 * imported elements are sent to the listener of this group.
 */
public void importTemplates(Token fileNameToken) {
  String fileName = fileNameToken.getText();
  // do nothing upon syntax error
  if (fileName == null || fileName.equals("<missing STRING>")) return;
  fileName = Misc.strip(fileName, 1);
  // System.out.println("import "+fileName);
  boolean isGroupFile = fileName.endsWith(".stg");
  boolean isTemplateFile = fileName.endsWith(".st");
  boolean isGroupDir = !(isGroupFile || isTemplateFile);

  STGroup g = null;

  File f = new File(fileName);
  if (f.isAbsolute()) { // load directly if absolute
    if (isTemplateFile) {
      g = new STGroup();
      g.setListener(this.getListener());
      g.loadAbsoluteTemplateFile(fileName);
    } else if (isGroupFile) {
      g = new STGroupFile(fileName, delimiterStartChar, delimiterStopChar);
      g.setListener(this.getListener());
    } else if (isGroupDir) {
      g = new STGroupDir(fileName, delimiterStartChar, delimiterStopChar);
      g.setListener(this.getListener());
    }
    importTemplates(g);
    return;
  }

  // it's a relative name; search path is working dir, g.stg's dir, CLASSPATH
  URL thisRoot = getRootDirURL();
  URL fileUnderRoot = null;
  // System.out.println("thisRoot="+thisRoot);
  try {
    fileUnderRoot = new URL(thisRoot + "/" + fileName);
  } catch (MalformedURLException mfe) {
    errMgr.internalError(null, "can't build URL for " + thisRoot + "/" + fileName, mfe);
    return;
  }
  if (isTemplateFile) {
    g = new STGroup();
    g.setListener(this.getListener());
    URL fileURL;
    if (Misc.urlExists(fileUnderRoot)) fileURL = fileUnderRoot;
    else fileURL = getURL(fileName); // try CLASSPATH
    if (fileURL != null) {
      try {
        InputStream s = fileURL.openStream();
        ANTLRInputStream templateStream = new ANTLRInputStream(s);
        templateStream.name = fileName;
        CompiledST code = g.loadTemplateFile("", fileName, templateStream);
        if (code == null) g = null;
      } catch (IOException ioe) {
        errMgr.internalError(null, "can't read from " + fileURL, ioe);
        g = null;
      }
    } else {
      g = null;
    }
  } else if (isGroupFile) {
    // System.out.println("look for fileUnderRoot: "+fileUnderRoot);
    if (Misc.urlExists(fileUnderRoot)) {
      g = new STGroupFile(fileUnderRoot, encoding, delimiterStartChar, delimiterStopChar);
      g.setListener(this.getListener());
    } else {
      g = new STGroupFile(fileName, delimiterStartChar, delimiterStopChar);
      g.setListener(this.getListener());
    }
  } else if (isGroupDir) {
    // System.out.println("try dir "+fileUnderRoot);
    if (Misc.urlExists(fileUnderRoot)) {
      g = new STGroupDir(fileUnderRoot, encoding, delimiterStartChar, delimiterStopChar);
      g.setListener(this.getListener());
    } else {
      // try in CLASSPATH
      // System.out.println("try dir in CLASSPATH "+fileName);
      g = new STGroupDir(fileName, delimiterStartChar, delimiterStopChar);
      g.setListener(this.getListener());
    }
  }

  if (g == null) {
    errMgr.compileTimeError(ErrorType.CANT_IMPORT, null, fileNameToken, fileName);
  } else {
    importTemplates(g);
  }
}
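// Usage sketch (illustrative): the same import relationship built in code via the public
// importTemplates(STGroup) overload. "lib.stg" and "app.stg" are assumed example group files,
// and "page" an assumed template defined in one of them.
import org.stringtemplate.v4.STGroup;
import org.stringtemplate.v4.STGroupFile;

class ImportExample {
  public static void main(String[] args) {
    STGroup lib = new STGroupFile("lib.stg");
    STGroup app = new STGroupFile("app.stg");
    app.importTemplates(lib); // app's own definitions shadow lib's (inheritance/polymorphism)
    System.out.println(app.getInstanceOf("page").render());
  }
}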
/** Notify about the exception (informational severity). */
private static void notifyException(Throwable err) {
  ErrorManager emgr = ErrorManager.getDefault();
  emgr.notify(ErrorManager.INFORMATIONAL, err);
}
/**
 * Returns the current setting of the message format descriptor.
 *
 * @return Current message format
 */
public String getMessageFormat() {
  return ErrorManager.getMessageFormat().toString();
}
/**
 * Returns the number of errors that the analysis/processing threw up.
 *
 * @return Error count
 */
public int getNumErrors() {
  return ErrorManager.getNumErrors();
}
/**
 * Set the message format to one of ANTLR, gnu, vs2005
 *
 * @param format
 */
public void setMessageFormat(String format) {
  ErrorManager.setFormat(format);
}
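// Usage sketch (illustrative): switching the diagnostic format through the wrappers above;
// "gnu" produces one-line messages that are convenient for grep and IDE integration.
import org.antlr.Tool;

class MessageFormatExample {
  public static void main(String[] args) {
    Tool antlr = new Tool();
    antlr.setMessageFormat("gnu"); // one of ANTLR, gnu, vs2005
    System.out.println(antlr.getMessageFormat());
  }
}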
public void processArgs(String[] args) {
  if (isVerbose()) {
    ErrorManager.info("ANTLR Parser Generator Version " + VERSION);
    showBanner = false;
  }

  if (args == null || args.length == 0) {
    help();
    return;
  }

  for (int i = 0; i < args.length; i++) {
    if (args[i].equals("-o") || args[i].equals("-fo")) {
      if (i + 1 >= args.length) {
        System.err.println("missing output directory with -fo/-o option; ignoring");
      } else {
        if (args[i].equals("-fo")) { // force output into dir
          setForceAllFilesToOutputDir(true);
        }
        i++;
        outputDirectory = args[i];
        if (outputDirectory.endsWith("/") || outputDirectory.endsWith("\\")) {
          outputDirectory = outputDirectory.substring(0, getOutputDirectory().length() - 1);
        }
        File outDir = new File(outputDirectory);
        haveOutputDir = true;
        if (outDir.exists() && !outDir.isDirectory()) {
          ErrorManager.error(ErrorManager.MSG_OUTPUT_DIR_IS_FILE, outputDirectory);
          setLibDirectory(".");
        }
      }
    } else if (args[i].equals("-lib")) {
      if (i + 1 >= args.length) {
        System.err.println("missing library directory with -lib option; ignoring");
      } else {
        i++;
        setLibDirectory(args[i]);
        if (getLibraryDirectory().endsWith("/") || getLibraryDirectory().endsWith("\\")) {
          setLibDirectory(getLibraryDirectory().substring(0, getLibraryDirectory().length() - 1));
        }
        File outDir = new File(getLibraryDirectory());
        if (!outDir.exists()) {
          ErrorManager.error(ErrorManager.MSG_DIR_NOT_FOUND, getLibraryDirectory());
          setLibDirectory(".");
        }
      }
    } else if (args[i].equals("-nfa")) {
      setGenerate_NFA_dot(true);
    } else if (args[i].equals("-dfa")) {
      setGenerate_DFA_dot(true);
    } else if (args[i].equals("-debug")) {
      setDebug(true);
    } else if (args[i].equals("-trace")) {
      setTrace(true);
    } else if (args[i].equals("-report")) {
      setReport(true);
    } else if (args[i].equals("-profile")) {
      setProfile(true);
    } else if (args[i].equals("-print")) {
      setPrintGrammar(true);
    } else if (args[i].equals("-depend")) {
      setDepend(true);
    } else if (args[i].equals("-verbose")) {
      setVerbose(true);
    } else if (args[i].equals("-version")) {
      version();
      exitNow = true;
    } else if (args[i].equals("-make")) {
      setMake(true);
    } else if (args[i].equals("-message-format")) {
      if (i + 1 >= args.length) {
        System.err.println("missing output format with -message-format option; using default");
      } else {
        i++;
        ErrorManager.setFormat(args[i]);
      }
    } else if (args[i].equals("-Xgrtree")) {
      internalOption_PrintGrammarTree = true; // print grammar tree
    } else if (args[i].equals("-Xdfa")) {
      internalOption_PrintDFA = true;
    } else if (args[i].equals("-Xnoprune")) {
      DFAOptimizer.PRUNE_EBNF_EXIT_BRANCHES = false;
    } else if (args[i].equals("-Xnocollapse")) {
      DFAOptimizer.COLLAPSE_ALL_PARALLEL_EDGES = false;
    } else if (args[i].equals("-Xdbgconversion")) {
      NFAToDFAConverter.debug = true;
    } else if (args[i].equals("-Xmultithreaded")) {
      NFAToDFAConverter.SINGLE_THREADED_NFA_CONVERSION = false;
    } else if (args[i].equals("-Xnomergestopstates")) {
      DFAOptimizer.MERGE_STOP_STATES = false;
    } else if (args[i].equals("-Xdfaverbose")) {
      internalOption_ShowNFAConfigsInDFA = true;
    } else if (args[i].equals("-Xwatchconversion")) {
      internalOption_watchNFAConversion = true;
    } else if (args[i].equals("-XdbgST")) {
      CodeGenerator.EMIT_TEMPLATE_DELIMITERS = true;
    } else if (args[i].equals("-Xmaxinlinedfastates")) {
      if (i + 1 >= args.length) {
        System.err.println("missing max inline dfa states -Xmaxinlinedfastates option; ignoring");
      } else {
        i++;
        CodeGenerator.MAX_ACYCLIC_DFA_STATES_INLINE = Integer.parseInt(args[i]);
      }
    } else if (args[i].equals("-Xmaxswitchcaselabels")) {
      if (i + 1 >= args.length) {
        System.err.println(
            "missing max switch case labels -Xmaxswitchcaselabels option; ignoring");
      } else {
        i++;
        CodeGenerator.MAX_SWITCH_CASE_LABELS = Integer.parseInt(args[i]);
      }
    } else if (args[i].equals("-Xminswitchalts")) {
      if (i + 1 >= args.length) {
        System.err.println("missing min switch alternatives -Xminswitchalts option; ignoring");
      } else {
        i++;
        CodeGenerator.MIN_SWITCH_ALTS = Integer.parseInt(args[i]);
      }
    } else if (args[i].equals("-Xm")) {
      if (i + 1 >= args.length) {
        System.err.println("missing max recursion with -Xm option; ignoring");
      } else {
        i++;
        NFAContext.MAX_SAME_RULE_INVOCATIONS_PER_NFA_CONFIG_STACK = Integer.parseInt(args[i]);
      }
    } else if (args[i].equals("-Xmaxdfaedges")) {
      if (i + 1 >= args.length) {
        System.err.println("missing max number of edges with -Xmaxdfaedges option; ignoring");
      } else {
        i++;
        DFA.MAX_STATE_TRANSITIONS_FOR_TABLE = Integer.parseInt(args[i]);
      }
    } else if (args[i].equals("-Xconversiontimeout")) {
      if (i + 1 >= args.length) {
        System.err.println("missing max time in ms -Xconversiontimeout option; ignoring");
      } else {
        i++;
        DFA.MAX_TIME_PER_DFA_CREATION = Integer.parseInt(args[i]);
      }
    } else if (args[i].equals("-Xnfastates")) {
      DecisionProbe.verbose = true;
    } else if (args[i].equals("-X")) {
      Xhelp();
    } else {
      if (args[i].charAt(0) != '-') {
        // Must be the grammar file
        addGrammarFile(args[i]);
      }
    }
  }
}
public void process() {
  boolean exceptionWhenWritingLexerFile = false;
  String lexerGrammarFileName = null; // necessary at this scope to have access in the catch below

  // Have to be tricky here when Maven or build tools call in and must new Tool()
  // before setting options. The banner won't display that way!
  if (isVerbose() && showBanner) {
    ErrorManager.info("ANTLR Parser Generator Version " + VERSION);
    showBanner = false;
  }

  try {
    sortGrammarFiles(); // update grammarFileNames
  } catch (Exception e) {
    ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
  } catch (Error e) {
    ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
  }

  for (String grammarFileName : grammarFileNames) {
    // If we are in make mode (to support build tools like Maven) and the
    // file is already up to date, then we do not build it (and in verbose mode
    // we will say so).
    if (make) {
      try {
        if (!buildRequired(grammarFileName)) continue;
      } catch (Exception e) {
        ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
      }
    }

    if (isVerbose() && !isDepend()) {
      System.out.println(grammarFileName);
    }

    try {
      if (isDepend()) {
        BuildDependencyGenerator dep = new BuildDependencyGenerator(this, grammarFileName);
        /*
        List outputFiles = dep.getGeneratedFileList();
        List dependents = dep.getDependenciesFileList();
        System.out.println("output: "+outputFiles);
        System.out.println("dependents: "+dependents);
        */
        System.out.println(dep.getDependencies());
        continue;
      }

      Grammar grammar = getRootGrammar(grammarFileName);
      // we now have all grammars read in as ASTs
      // (i.e., root and all delegates)
      grammar.composite.assignTokenTypes();
      grammar.composite.defineGrammarSymbols();
      grammar.composite.createNFAs();

      generateRecognizer(grammar);

      if (isPrintGrammar()) {
        grammar.printGrammar(System.out);
      }

      if (isReport()) {
        GrammarReport greport = new GrammarReport(grammar);
        System.out.println(greport.toString());
        // print out a backtracking report too (that is not encoded into log)
        System.out.println(greport.getBacktrackingReport());
        // same for aborted NFA->DFA conversions
        System.out.println(greport.getAnalysisTimeoutReport());
      }
      if (isProfile()) {
        GrammarReport greport = new GrammarReport(grammar);
        Stats.writeReport(GrammarReport.GRAMMAR_STATS_FILENAME, greport.toNotifyString());
      }

      // now handle the lexer if one was created for a merged spec
      String lexerGrammarStr = grammar.getLexerGrammar();
      // System.out.println("lexer grammar:\n"+lexerGrammarStr);
      if (grammar.type == Grammar.COMBINED && lexerGrammarStr != null) {
        lexerGrammarFileName = grammar.getImplicitlyGeneratedLexerFileName();
        try {
          Writer w = getOutputFile(grammar, lexerGrammarFileName);
          w.write(lexerGrammarStr);
          w.close();
        } catch (IOException e) {
          // emit different error message when creating the implicit lexer fails
          // due to write permission error
          exceptionWhenWritingLexerFile = true;
          throw e;
        }
        try {
          StringReader sr = new StringReader(lexerGrammarStr);
          Grammar lexerGrammar = new Grammar();
          lexerGrammar.composite.watchNFAConversion = internalOption_watchNFAConversion;
          lexerGrammar.implicitLexer = true;
          lexerGrammar.setTool(this);
          File lexerGrammarFullFile =
              new File(getFileDirectory(lexerGrammarFileName), lexerGrammarFileName);
          lexerGrammar.setFileName(lexerGrammarFullFile.toString());

          lexerGrammar.importTokenVocabulary(grammar);
          lexerGrammar.parseAndBuildAST(sr);

          sr.close();

          lexerGrammar.composite.assignTokenTypes();
          lexerGrammar.composite.defineGrammarSymbols();
          lexerGrammar.composite.createNFAs();

          generateRecognizer(lexerGrammar);
        } finally {
          // make sure we clean up
          if (deleteTempLexer) {
            File outputDir = getOutputDirectory(lexerGrammarFileName);
            File outputFile = new File(outputDir, lexerGrammarFileName);
            outputFile.delete();
          }
        }
      }
    } catch (IOException e) {
      if (exceptionWhenWritingLexerFile) {
        ErrorManager.error(ErrorManager.MSG_CANNOT_WRITE_FILE, lexerGrammarFileName, e);
      } else {
        ErrorManager.error(ErrorManager.MSG_CANNOT_OPEN_FILE, grammarFileName);
      }
    } catch (Exception e) {
      ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, grammarFileName, e);
    }
    /*
    finally {
      System.out.println("creates="+ Interval.creates);
      System.out.println("hits="+ Interval.hits);
      System.out.println("misses="+ Interval.misses);
      System.out.println("outOfRange="+ Interval.outOfRange);
    }
    */
  }
}
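// Usage sketch (illustrative): "-make" style incremental builds from code. With make enabled,
// process() skips any grammar whose generated files are newer than the grammar itself (see
// buildRequired()). "T.g" is an assumed grammar file name.
import org.antlr.Tool;

class MakeModeExample {
  public static void main(String[] args) {
    Tool antlr = new Tool();
    antlr.setMake(true); // only rebuild out-of-date grammars
    antlr.setVerbose(true); // report each grammar actually processed
    antlr.addGrammarFile("T.g");
    antlr.process();
  }
}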