Example #1
0
File: Tool.java Project: laiello/perseph
 public void sortGrammarFiles() throws IOException {
   // Topologically order the grammar files so that any tokenVocab
   // dependencies are processed before the grammars that import them.
   Graph dependencyGraph = new Graph();
   List<String> unreadableFiles = new ArrayList<String>();
   for (String fileName : grammarFileNames) {
     try {
       GrammarSpelunker spelunker = new GrammarSpelunker(inputDirectory, fileName);
       spelunker.parse();
       String vocabName = spelunker.getTokenVocab();
       String grammarName = spelunker.getGrammarName();
       // every grammar depends on its tokenVocab option, when one is set
       if (vocabName != null) {
         dependencyGraph.addEdge(fileName, vocabName + CodeGenerator.VOCAB_FILE_EXTENSION);
       }
       // each generated .tokens file depends on its own grammar
       dependencyGraph.addEdge(grammarName + CodeGenerator.VOCAB_FILE_EXTENSION, fileName);
     } catch (FileNotFoundException fnfe) {
       ErrorManager.error(ErrorManager.MSG_CANNOT_OPEN_FILE, fileName);
       unreadableFiles.add(fileName);
     }
   }
   List<Object> ordered = dependencyGraph.sort();
   grammarFileNames.clear(); // wipe so we can install the new ordered list
   for (Object node : ordered) {
     String candidate = (String) node;
     // keep only readable grammar files (.g / .g3); the sort also yields
     // synthetic .tokens nodes, which we drop here
     if (unreadableFiles.contains(candidate)) continue;
     if (!(candidate.endsWith(".g") || candidate.endsWith(".g3"))) continue;
     grammarFileNames.add(candidate);
   }
 }
Example #2
0
File: Tool.java Project: laiello/perseph
  protected void generateNFAs(Grammar g) {
    // Emit a DOT description of the NFA start state for every rule of g,
    // including rules pulled in from imported (delegate) grammars.
    DOTGenerator dotGenerator = new DOTGenerator(g);
    // NOTE(review): this mutates the collection returned by
    // getAllImportedRules(); assumes that collection is a fresh copy — confirm.
    Collection rules = g.getAllImportedRules();
    rules.addAll(g.getRules());

    // enhanced-for instead of a raw Iterator loop; the collection is still
    // raw, so the element cast remains necessary
    for (Object o : rules) {
      Rule r = (Rule) o;
      try {
        String dot = dotGenerator.getDOT(r.startState);
        if (dot != null) {
          writeDOTFile(g, r, dot);
        }
      } catch (IOException ioe) {
        // report and keep going; one bad rule should not abort the rest
        ErrorManager.error(ErrorManager.MSG_CANNOT_WRITE_FILE, ioe);
      }
    }
  }
Example #3
0
File: Tool.java Project: laiello/perseph
 public void generateDFAs(Grammar g) {
   // Write a DOT file for the lookahead DFA of every decision in g.
   // The generator only depends on g, so it is hoisted out of the loop
   // instead of being re-created once per decision.
   DOTGenerator dotGenerator = new DOTGenerator(g);
   for (int d = 1; d <= g.getNumberOfDecisions(); d++) {
     DFA dfa = g.getLookaheadDFA(d);
     if (dfa == null) {
       continue; // not there for some reason, ignore
     }
     String dot = dotGenerator.getDOT(dfa.startState);
     String dotFileName = g.name + "." + "dec-" + d;
     if (g.implicitLexer) {
       // implicit lexers share the grammar name; add the type suffix so
       // their DOT files do not collide with the parser's
       dotFileName = g.name + Grammar.grammarTypeToFileNameSuffix[g.type] + "." + "dec-" + d;
     }
     try {
       writeDOTFile(g, dotFileName, dot);
     } catch (IOException ioe) {
       ErrorManager.error(ErrorManager.MSG_CANNOT_GEN_DOT_FILE, dotFileName, ioe);
     }
   }
 }
Example #4
0
File: Tool.java Project: laiello/perseph
  public void process() {
    // Drive the full pipeline over every grammar file: dependency-sort the
    // file list, then for each file parse, analyze (token types, symbols,
    // NFAs) and generate a recognizer — plus an implicit lexer when the
    // grammar is a COMBINED spec.
    boolean exceptionWhenWritingLexerFile = false;
    String lexerGrammarFileName = null; // necessary at this scope to have access in the catch below

    // Have to be tricky here when Maven or build tools call in and must new Tool()
    // before setting options. The banner won't display that way!
    if (isVerbose() && showBanner) {
      ErrorManager.info("ANTLR Parser Generator  Version " + VERSION);
      showBanner = false;
    }

    try {
      sortGrammarFiles(); // update grammarFileNames
    } catch (Exception e) {
      ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
    } catch (Error e) {
      ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
    }

    for (String grammarFileName : grammarFileNames) {
      // BUGFIX: reset per-file error state. Previously, once a lexer-file
      // write failed for one grammar, every subsequent IOException (even for
      // unrelated grammars) was misreported as MSG_CANNOT_WRITE_FILE with the
      // stale lexer grammar file name.
      exceptionWhenWritingLexerFile = false;
      lexerGrammarFileName = null;

      // If we are in make mode (to support build tools like Maven) and the
      // file is already up to date, then we do not build it (and in verbose mode
      // we will say so).
      if (make) {
        try {
          if (!buildRequired(grammarFileName)) continue;
        } catch (Exception e) {
          ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, e);
        }
      }

      if (isVerbose() && !isDepend()) {
        System.out.println(grammarFileName);
      }
      try {
        if (isDepend()) {
          // -depend mode: only print the dependency list, never generate code
          BuildDependencyGenerator dep = new BuildDependencyGenerator(this, grammarFileName);
          /*
          List outputFiles = dep.getGeneratedFileList();
          List dependents = dep.getDependenciesFileList();
          System.out.println("output: "+outputFiles);
          System.out.println("dependents: "+dependents);
           */
          System.out.println(dep.getDependencies());
          continue;
        }

        Grammar grammar = getRootGrammar(grammarFileName);
        // we now have all grammars read in as ASTs
        // (i.e., root and all delegates)
        grammar.composite.assignTokenTypes();
        grammar.composite.defineGrammarSymbols();
        grammar.composite.createNFAs();

        generateRecognizer(grammar);

        if (isPrintGrammar()) {
          grammar.printGrammar(System.out);
        }

        if (isReport()) {
          GrammarReport greport = new GrammarReport(grammar);
          System.out.println(greport.toString());
          // print out a backtracking report too (that is not encoded into log)
          System.out.println(greport.getBacktrackingReport());
          // same for aborted NFA->DFA conversions
          System.out.println(greport.getAnalysisTimeoutReport());
        }
        if (isProfile()) {
          GrammarReport greport = new GrammarReport(grammar);
          Stats.writeReport(GrammarReport.GRAMMAR_STATS_FILENAME, greport.toNotifyString());
        }

        // now handle the lexer if one was created for a merged spec
        String lexerGrammarStr = grammar.getLexerGrammar();
        // System.out.println("lexer grammar:\n"+lexerGrammarStr);
        if (grammar.type == Grammar.COMBINED && lexerGrammarStr != null) {
          lexerGrammarFileName = grammar.getImplicitlyGeneratedLexerFileName();
          try {
            Writer w = getOutputFile(grammar, lexerGrammarFileName);
            w.write(lexerGrammarStr);
            w.close();
          } catch (IOException e) {
            // emit different error message when creating the implicit lexer fails
            // due to write permission error
            exceptionWhenWritingLexerFile = true;
            throw e;
          }
          try {
            StringReader sr = new StringReader(lexerGrammarStr);
            Grammar lexerGrammar = new Grammar();
            lexerGrammar.composite.watchNFAConversion = internalOption_watchNFAConversion;
            lexerGrammar.implicitLexer = true;
            lexerGrammar.setTool(this);
            File lexerGrammarFullFile =
                new File(getFileDirectory(lexerGrammarFileName), lexerGrammarFileName);
            lexerGrammar.setFileName(lexerGrammarFullFile.toString());

            // seed the lexer with the parser's token types so the two agree
            lexerGrammar.importTokenVocabulary(grammar);
            lexerGrammar.parseAndBuildAST(sr);

            sr.close();

            lexerGrammar.composite.assignTokenTypes();
            lexerGrammar.composite.defineGrammarSymbols();
            lexerGrammar.composite.createNFAs();

            generateRecognizer(lexerGrammar);
          } finally {
            // make sure we clean up
            if (deleteTempLexer) {
              File outputDir = getOutputDirectory(lexerGrammarFileName);
              File outputFile = new File(outputDir, lexerGrammarFileName);
              outputFile.delete();
            }
          }
        }
      } catch (IOException e) {
        if (exceptionWhenWritingLexerFile) {
          ErrorManager.error(ErrorManager.MSG_CANNOT_WRITE_FILE, lexerGrammarFileName, e);
        } else {
          ErrorManager.error(ErrorManager.MSG_CANNOT_OPEN_FILE, grammarFileName);
        }
      } catch (Exception e) {
        ErrorManager.error(ErrorManager.MSG_INTERNAL_ERROR, grammarFileName, e);
      }
      /*
      finally {
      System.out.println("creates="+ Interval.creates);
      System.out.println("hits="+ Interval.hits);
      System.out.println("misses="+ Interval.misses);
      System.out.println("outOfRange="+ Interval.outOfRange);
      }
       */
    }
  }
Example #5
0
File: Tool.java Project: laiello/perseph
  public void processArgs(String[] args) {
    // Parse command-line options into tool state. Options that take a value
    // consume the next args element; missing values are reported and the
    // option is ignored. A bare non-dash argument is treated as a grammar file.

    if (isVerbose()) {
      ErrorManager.info("ANTLR Parser Generator  Version " + VERSION);
      showBanner = false;
    }

    if (args == null || args.length == 0) {
      help();
      return;
    }
    for (int i = 0; i < args.length; i++) {
      if (args[i].equals("-o") || args[i].equals("-fo")) {
        if (i + 1 >= args.length) {
          System.err.println("missing output directory with -fo/-o option; ignoring");
        } else {
          if (args[i].equals("-fo")) { // force output into dir
            setForceAllFilesToOutputDir(true);
          }
          i++;
          outputDirectory = args[i];
          if (outputDirectory.endsWith("/") || outputDirectory.endsWith("\\")) {
            // BUGFIX: measure the field itself when stripping the trailing
            // separator; getOutputDirectory() may not return the raw value,
            // so its length could produce a wrong substring bound.
            outputDirectory = outputDirectory.substring(0, outputDirectory.length() - 1);
          }
          File outDir = new File(outputDirectory);
          haveOutputDir = true;
          if (outDir.exists() && !outDir.isDirectory()) {
            ErrorManager.error(ErrorManager.MSG_OUTPUT_DIR_IS_FILE, outputDirectory);
            // NOTE(review): resetting the *library* directory on an *output*
            // directory error looks like a copy/paste from the -lib branch;
            // confirm this is intended before changing it.
            setLibDirectory(".");
          }
        }
      } else if (args[i].equals("-lib")) {
        if (i + 1 >= args.length) {
          System.err.println("missing library directory with -lib option; ignoring");
        } else {
          i++;
          setLibDirectory(args[i]);
          if (getLibraryDirectory().endsWith("/") || getLibraryDirectory().endsWith("\\")) {
            setLibDirectory(getLibraryDirectory().substring(0, getLibraryDirectory().length() - 1));
          }
          File outDir = new File(getLibraryDirectory());
          if (!outDir.exists()) {
            ErrorManager.error(ErrorManager.MSG_DIR_NOT_FOUND, getLibraryDirectory());
            setLibDirectory(".");
          }
        }
      } else if (args[i].equals("-nfa")) {
        setGenerate_NFA_dot(true);
      } else if (args[i].equals("-dfa")) {
        setGenerate_DFA_dot(true);
      } else if (args[i].equals("-debug")) {
        setDebug(true);
      } else if (args[i].equals("-trace")) {
        setTrace(true);
      } else if (args[i].equals("-report")) {
        setReport(true);
      } else if (args[i].equals("-profile")) {
        setProfile(true);
      } else if (args[i].equals("-print")) {
        setPrintGrammar(true);
      } else if (args[i].equals("-depend")) {
        setDepend(true);
      } else if (args[i].equals("-verbose")) {
        setVerbose(true);
      } else if (args[i].equals("-version")) {
        version();
        exitNow = true;
      } else if (args[i].equals("-make")) {
        setMake(true);
      } else if (args[i].equals("-message-format")) {
        if (i + 1 >= args.length) {
          System.err.println("missing output format with -message-format option; using default");
        } else {
          i++;
          ErrorManager.setFormat(args[i]);
        }
      } else if (args[i].equals("-Xgrtree")) {
        internalOption_PrintGrammarTree = true; // print grammar tree
      } else if (args[i].equals("-Xdfa")) {
        internalOption_PrintDFA = true;
      } else if (args[i].equals("-Xnoprune")) {
        DFAOptimizer.PRUNE_EBNF_EXIT_BRANCHES = false;
      } else if (args[i].equals("-Xnocollapse")) {
        DFAOptimizer.COLLAPSE_ALL_PARALLEL_EDGES = false;
      } else if (args[i].equals("-Xdbgconversion")) {
        NFAToDFAConverter.debug = true;
      } else if (args[i].equals("-Xmultithreaded")) {
        NFAToDFAConverter.SINGLE_THREADED_NFA_CONVERSION = false;
      } else if (args[i].equals("-Xnomergestopstates")) {
        DFAOptimizer.MERGE_STOP_STATES = false;
      } else if (args[i].equals("-Xdfaverbose")) {
        internalOption_ShowNFAConfigsInDFA = true;
      } else if (args[i].equals("-Xwatchconversion")) {
        internalOption_watchNFAConversion = true;
      } else if (args[i].equals("-XdbgST")) {
        CodeGenerator.EMIT_TEMPLATE_DELIMITERS = true;
      } else if (args[i].equals("-Xmaxinlinedfastates")) {
        if (i + 1 >= args.length) {
          System.err.println("missing max inline dfa states -Xmaxinlinedfastates option; ignoring");
        } else {
          i++;
          CodeGenerator.MAX_ACYCLIC_DFA_STATES_INLINE = Integer.parseInt(args[i]);
        }
      } else if (args[i].equals("-Xmaxswitchcaselabels")) {
        if (i + 1 >= args.length) {
          System.err.println(
              "missing max switch case labels -Xmaxswitchcaselabels option; ignoring");
        } else {
          i++;
          CodeGenerator.MAX_SWITCH_CASE_LABELS = Integer.parseInt(args[i]);
        }
      } else if (args[i].equals("-Xminswitchalts")) {
        if (i + 1 >= args.length) {
          System.err.println("missing min switch alternatives -Xminswitchalts option; ignoring");
        } else {
          i++;
          CodeGenerator.MIN_SWITCH_ALTS = Integer.parseInt(args[i]);
        }
      } else if (args[i].equals("-Xm")) {
        if (i + 1 >= args.length) {
          System.err.println("missing max recursion with -Xm option; ignoring");
        } else {
          i++;
          NFAContext.MAX_SAME_RULE_INVOCATIONS_PER_NFA_CONFIG_STACK = Integer.parseInt(args[i]);
        }
      } else if (args[i].equals("-Xmaxdfaedges")) {
        if (i + 1 >= args.length) {
          System.err.println("missing max number of edges with -Xmaxdfaedges option; ignoring");
        } else {
          i++;
          DFA.MAX_STATE_TRANSITIONS_FOR_TABLE = Integer.parseInt(args[i]);
        }
      } else if (args[i].equals("-Xconversiontimeout")) {
        if (i + 1 >= args.length) {
          System.err.println("missing max time in ms -Xconversiontimeout option; ignoring");
        } else {
          i++;
          DFA.MAX_TIME_PER_DFA_CREATION = Integer.parseInt(args[i]);
        }
      } else if (args[i].equals("-Xnfastates")) {
        DecisionProbe.verbose = true;
      } else if (args[i].equals("-X")) {
        Xhelp();
      } else {
        if (args[i].charAt(0) != '-') {
          // Must be the grammar file
          addGrammarFile(args[i]);
        }
      }
    }
  }