Code example #1
File: Tool.java  Project: laiello/perseph
  /** Write the given DOT (Graphviz) description to <name>.dot in the grammar's output directory. */
  protected void writeDOTFile(Grammar g, String name, String dot) throws IOException {
    Writer fw = getOutputFile(g, name + ".dot");
    fw.write(dot);
    fw.close();
  }
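
This helper is called from the Tool's DOT-visualization path (e.g., when ANTLR 3 is asked to emit NFA/DFA diagrams). A minimal calling sketch, assuming a Grammar instance g is in scope and the DOT text has already been produced (for example by ANTLR's DOTGenerator); the decision name and DOT string below are placeholders:

  // Hypothetical caller inside Tool (or a subclass): write pre-generated
  // DOT text to "MyRule.dec-1.dot" in the grammar's output directory.
  String dot = "digraph NFA { rankdir=LR; 0 -> 1; }";  // placeholder DOT text
  writeDOTFile(g, "MyRule.dec-1", dot);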
Code example #2
File: Tool.java  Project: laiello/perseph
  public void process() {
    boolean exceptionWhenWritingLexerFile = false;
    String lexerGrammarFileName = null; // necessary at this scope to have access in the catch below

    // Be careful here: build tools such as Maven may create a new Tool()
    // before setting options, and the banner would not display that way;
    // show it here at most once.
    if (isVerbose() && showBanner) {
      ErrorManager.info("ANTLR Parser Generator  Version " + VERSION);
      showBanner = false;
    }

    try {
      sortGrammarFiles(); // update grammarFileNames
    } catch (Exception e) {
      ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
    } catch (Error e) {
      ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
    }

    for (String grammarFileName : grammarFileNames) {
      // If we are in make mode (to support build tools like Maven) and the
      // file is already up to date, then we do not build it (and in verbose mode
      // we will say so).
      if (make) {
        try {
          if (!buildRequired(grammarFileName)) continue;
        } catch (Exception e) {
          ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
        }
      }

      if (isVerbose() && !isDepend()) {
        System.out.println(grammarFileName);
      }
      try {
        if (isDepend()) {
          BuildDependencyGenerator dep = new BuildDependencyGenerator(this, grammarFileName);
          /*
          List outputFiles = dep.getGeneratedFileList();
          List dependents = dep.getDependenciesFileList();
          System.out.println("output: "+outputFiles);
          System.out.println("dependents: "+dependents);
           */
          System.out.println(dep.getDependencies());
          continue;
        }

        Grammar grammar = getRootGrammar(grammarFileName);
        // we now have all grammars read in as ASTs
        // (i.e., root and all delegates)
        grammar.composite.assignTokenTypes();
        grammar.composite.defineGrammarSymbols();
        grammar.composite.createNFAs();

        generateRecognizer(grammar);

        if (isPrintGrammar()) {
          grammar.printGrammar(System.out);
        }

        if (isReport()) {
          GrammarReport greport = new GrammarReport(grammar);
          System.out.println(greport.toString());
          // print out a backtracking report too (that is not encoded into log)
          System.out.println(greport.getBacktrackingReport());
          // same for aborted NFA->DFA conversions
          System.out.println(greport.getAnalysisTimeoutReport());
        }
        if (isProfile()) {
          GrammarReport greport = new GrammarReport(grammar);
          Stats.writeReport(GrammarReport.GRAMMAR_STATS_FILENAME, greport.toNotifyString());
        }

        // now handle the lexer if one was created for a merged spec
        String lexerGrammarStr = grammar.getLexerGrammar();
        // System.out.println("lexer grammar:\n"+lexerGrammarStr);
        if (grammar.type == Grammar.COMBINED && lexerGrammarStr != null) {
          lexerGrammarFileName = grammar.getImplicitlyGeneratedLexerFileName();
          try {
            Writer w = getOutputFile(grammar, lexerGrammarFileName);
            w.write(lexerGrammarStr);
            w.close();
          } catch (IOException e) {
            // emit a different error message when writing the implicit lexer
            // file fails, e.g. due to a write-permission error
            exceptionWhenWritingLexerFile = true;
            throw e;
          }
          try {
            StringReader sr = new StringReader(lexerGrammarStr);
            Grammar lexerGrammar = new Grammar();
            lexerGrammar.composite.watchNFAConversion = internalOption_watchNFAConversion;
            lexerGrammar.implicitLexer = true;
            lexerGrammar.setTool(this);
            File lexerGrammarFullFile =
                new File(getFileDirectory(lexerGrammarFileName), lexerGrammarFileName);
            lexerGrammar.setFileName(lexerGrammarFullFile.toString());

            lexerGrammar.importTokenVocabulary(grammar);
            lexerGrammar.parseAndBuildAST(sr);

            sr.close();

            lexerGrammar.composite.assignTokenTypes();
            lexerGrammar.composite.defineGrammarSymbols();
            lexerGrammar.composite.createNFAs();

            generateRecognizer(lexerGrammar);
          } finally {
            // make sure we clean up
            if (deleteTempLexer) {
              File outputDir = getOutputDirectory(lexerGrammarFileName);
              File outputFile = new File(outputDir, lexerGrammarFileName);
              outputFile.delete();
            }
          }
        }
      } catch (IOException e) {
        if (exceptionWhenWritingLexerFile) {
          ErrorManager.error(ErrorManager.MSG_CANNOT_WRITE_FILE, lexerGrammarFileName, e);
        } else {
          ErrorManager.error(ErrorManager.MSG_CANNOT_OPEN_FILE, grammarFileName);
        }
      } catch (Exception e) {
        ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, grammarFileName, e);
      }
      /*
      finally {
      System.out.println("creates="+ Interval.creates);
      System.out.println("hits="+ Interval.hits);
      System.out.println("misses="+ Interval.misses);
      System.out.println("outOfRange="+ Interval.outOfRange);
      }
       */
    }
  }
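
For reference, process() is normally driven either from ANTLR's own main() or from a build tool embedding the Tool. A minimal embedding sketch, assuming the ANTLR 3 Tool(String[]) constructor and ErrorManager.getNumErrors(); the output directory and grammar file name are placeholders:

  // Hypothetical embedding: pass ordinary command-line style arguments,
  // then run code generation for the listed grammar(s).
  String[] args = { "-o", "build/generated", "MyParser.g" };  // placeholder paths
  Tool antlr = new Tool(args);
  antlr.process();
  if (ErrorManager.getNumErrors() > 0) {
      System.err.println("ANTLR reported errors during generation.");
  }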