@Test
 public void testRuleRef() throws Exception {
   Grammar g = new Grammar("parser grammar T;\n" + "a : e ;\n" + "e : E ;\n");
   String expecting =
       "max type 1\n"
           + "0:RULE_START 0\n"
           + "1:RULE_STOP 0\n"
           + "2:RULE_START 1\n"
           + "3:RULE_STOP 1\n"
           + "4:BASIC 0\n"
           + "5:BASIC 0\n"
           + "6:BASIC 1\n"
           + "7:BASIC 1\n"
           + "8:BASIC 1\n"
           + "rule 0:0\n"
           + "rule 1:2\n"
           + "0->4 EPSILON 0,0,0\n"
           + "2->6 EPSILON 0,0,0\n"
           + "4->5 RULE 2,1,0\n"
           + "5->1 EPSILON 0,0,0\n"
           + "6->7 ATOM 1,0,0\n"
           + "7->3 EPSILON 0,0,0\n";
   ATN atn = createATN(g, true);
   String result = ATNSerializer.getDecoded(atn, Arrays.asList(g.getTokenNames()));
   assertEquals(expecting, result);
 }
  @Test
  public void testParserRuleRefInLexerRule() throws Exception {
    boolean threwException = false;
    ErrorQueue errorQueue = new ErrorQueue();
    try {
      String gstr = "grammar U;\n" + "a : A;\n" + "A : a;\n";

      Tool tool = new Tool();
      tool.removeListeners();
      tool.addListener(errorQueue);
      assertEquals(0, errorQueue.size());
      GrammarRootAST grammarRootAST = tool.parseGrammarFromString(gstr);
      assertEquals(0, errorQueue.size());
      Grammar g = tool.createGrammar(grammarRootAST);
      assertEquals(0, errorQueue.size());
      g.fileName = "<string>";
      tool.process(g, false);
    } catch (Exception e) {
      threwException = true;
      e.printStackTrace();
    }
    System.out.println(errorQueue);
    assertEquals(1, errorQueue.errors.size());
    assertEquals(ErrorType.PARSER_RULE_REF_IN_LEXER_RULE, errorQueue.errors.get(0).getErrorType());
    assertEquals("[a, A]", Arrays.toString(errorQueue.errors.get(0).getArgs()));
     assertFalse(threwException);
  }
  public void translateLeftRecursiveRules() {
    String language = g.getOptionString("language");
    // translate all recursive rules
    List<String> leftRecursiveRuleNames = new ArrayList<String>();
    for (Rule r : rules) {
      if (!Grammar.isTokenName(r.name)) {
        if (LeftRecursiveRuleAnalyzer.hasImmediateRecursiveRuleRefs(r.ast, r.name)) {
          g.originalTokenStream = g.tokenStream;
          boolean fitsPattern = translateLeftRecursiveRule(ast, (LeftRecursiveRule) r, language);
          if (fitsPattern) leftRecursiveRuleNames.add(r.name);
        }
      }
    }

    // update all refs to recursive rules to have [0] argument
    for (GrammarAST r : ast.getNodesWithType(ANTLRParser.RULE_REF)) {
      if (r.getParent().getType() == ANTLRParser.RULE) continue; // must be rule def
      if (((GrammarASTWithOptions) r).getOptionString(PRECEDENCE_OPTION_NAME) != null)
        continue; // already has arg; must be in rewritten rule
      if (leftRecursiveRuleNames.contains(r.getText())) {
        // found ref to recursive rule not already rewritten with arg
        ((GrammarASTWithOptions) r)
            .setOption(
                PRECEDENCE_OPTION_NAME,
                (GrammarAST) new GrammarASTAdaptor().create(ANTLRParser.INT, "0"));
      }
    }
  }
Example #4
 public void loadImportedGrammars() {
   if (ast == null) return;
   GrammarAST i = (GrammarAST) ast.getFirstChildWithType(ANTLRParser.IMPORT);
   if (i == null) return;
   importedGrammars = new ArrayList<Grammar>();
   for (Object c : i.getChildren()) {
     GrammarAST t = (GrammarAST) c;
     String importedGrammarName = null;
     if (t.getType() == ANTLRParser.ASSIGN) {
       t = (GrammarAST) t.getChild(1);
       importedGrammarName = t.getText();
       tool.log("grammar", "import " + importedGrammarName);
     } else if (t.getType() == ANTLRParser.ID) {
       importedGrammarName = t.getText();
       tool.log("grammar", "import " + t.getText());
     }
     Grammar g;
     try {
       g = tool.loadImportedGrammar(this, t);
     } catch (IOException ioe) {
       tool.errMgr.grammarError(
           ErrorType.ERROR_READING_IMPORTED_GRAMMAR,
           importedGrammarName,
           t.getToken(),
           importedGrammarName,
           name);
       continue;
     }
     // did it come back as error node or missing?
     if (g == null) continue;
     g.parent = this;
     importedGrammars.add(g);
     g.loadImportedGrammars(); // recursively pursue any imports in this import
   }
 }
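Here the ASSIGN branch handles aliased delegate entries of the form import x = Y;, taking the grammar name from the second child, while the ID branch handles plain import Y; entries; both paths resolve the named grammar through the same tool.loadImportedGrammar(this, t) call.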
 @Test
 public void testSimpleLoop() throws Exception {
   Grammar g = new Grammar("parser grammar T;\n" + "a : A+ B ;");
   String expecting =
       "max type 2\n"
           + "0:RULE_START 0\n"
           + "1:RULE_STOP 0\n"
           + "2:BASIC 0\n"
           + "3:PLUS_BLOCK_START 0 4\n"
           + "4:BLOCK_END 0\n"
           + "5:PLUS_LOOP_BACK 0\n"
           + "6:LOOP_END 0 5\n"
           + "7:BASIC 0\n"
           + "8:BASIC 0\n"
           + "9:BASIC 0\n"
           + "rule 0:0\n"
           + "0->3 EPSILON 0,0,0\n"
           + "2->4 ATOM 1,0,0\n"
           + "3->2 EPSILON 0,0,0\n"
           + "4->5 EPSILON 0,0,0\n"
           + "5->3 EPSILON 0,0,0\n"
           + "5->6 EPSILON 0,0,0\n"
           + "6->7 EPSILON 0,0,0\n"
           + "7->8 ATOM 2,0,0\n"
           + "8->1 EPSILON 0,0,0\n"
           + "0:5\n";
   ATN atn = createATN(g, true);
   String result = ATNSerializer.getDecoded(atn, Arrays.asList(g.getTokenNames()));
   assertEquals(expecting, result);
 }
 @Test
 public void test3Alts() throws Exception {
   Grammar g = new Grammar("parser grammar T;\n" + "a : A | A B | A B C ;");
   String expecting =
       "max type 3\n"
           + "0:RULE_START 0\n"
           + "1:RULE_STOP 0\n"
           + "2:BASIC 0\n"
           + "3:BASIC 0\n"
           + "4:BASIC 0\n"
           + "5:BASIC 0\n"
           + "6:BASIC 0\n"
           + "7:BASIC 0\n"
           + "8:BLOCK_START 0 9\n"
           + "9:BLOCK_END 0\n"
           + "10:BASIC 0\n"
           + "rule 0:0\n"
           + "0->8 EPSILON 0,0,0\n"
           + "2->9 ATOM 1,0,0\n"
           + "3->4 ATOM 1,0,0\n"
           + "4->9 ATOM 2,0,0\n"
           + "5->6 ATOM 1,0,0\n"
           + "6->7 ATOM 2,0,0\n"
           + "7->9 ATOM 3,0,0\n"
           + "8->2 EPSILON 0,0,0\n"
           + "8->3 EPSILON 0,0,0\n"
           + "8->5 EPSILON 0,0,0\n"
           + "9->1 EPSILON 0,0,0\n"
           + "0:8\n";
   ATN atn = createATN(g, true);
   String result = ATNSerializer.getDecoded(atn, Arrays.asList(g.getTokenNames()));
   assertEquals(expecting, result);
 }
Example #7
  public void testActions(String templates, String actionName, String action, String expected)
      throws org.antlr.runtime.RecognitionException {
    int lp = templates.indexOf('(');
    String name = templates.substring(0, lp);
    STGroup group = new STGroupString(templates);
    ST st = group.getInstanceOf(name);
    st.add(actionName, action);
    String grammar = st.render();
    ErrorQueue equeue = new ErrorQueue();
    Grammar g = new Grammar(grammar, equeue);
    if (g.ast != null && !g.ast.hasErrors) {
      SemanticPipeline sem = new SemanticPipeline(g);
      sem.process();

      ATNFactory factory = new ParserATNFactory(g);
      if (g.isLexer()) factory = new LexerATNFactory((LexerGrammar) g);
      g.atn = factory.createATN();

      CodeGenerator gen = new CodeGenerator(g);
      ST outputFileST = gen.generateParser();
      String output = outputFileST.render();
      // System.out.println(output);
      String b = "#" + actionName + "#";
      int start = output.indexOf(b);
      String e = "#end-" + actionName + "#";
      int end = output.indexOf(e);
      String snippet = output.substring(start + b.length(), end);
      assertEquals(expected, snippet);
    }
    if (equeue.size() > 0) {
      System.err.println(equeue.toString());
    }
  }
 protected int getTokenType(@NotNull GrammarAST atom) {
   int ttype;
   if (g.isLexer()) {
     ttype = CharSupport.getCharValueFromGrammarCharLiteral(atom.getText());
   } else {
     ttype = g.getTokenType(atom.getText());
   }
   return ttype;
 }
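A minimal usage sketch of the two branches above, assuming it runs inside a test method declared throws Exception like the tests above; the 'x' literal, the tiny parser grammar, and the variable names are illustrative.

 // Sketch only: resolve an atom's token type the way getTokenType(...) does.
 int fromLexerLiteral = CharSupport.getCharValueFromGrammarCharLiteral("'x'"); // code point of x
 Grammar parserG = new Grammar("parser grammar T;\n" + "a : A B ;");
 int fromParserName = parserG.getTokenType("A"); // whatever type number the tool assigned to A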
Example #9
  ATN createATN(Grammar g) {
    if (g.atn != null) return g.atn;
    semanticProcess(g);

    ParserATNFactory f = new ParserATNFactory(g);
    if (g.isLexer()) f = new LexerATNFactory((LexerGrammar) g);
    g.atn = f.createATN();

    return g.atn;
  }
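A hypothetical use of this helper, reusing the Grammar constructor and the ATNSerializer.getDecoded call seen in the tests above; the test name and grammar text are made up for illustration.

 @Test
 public void testCreateATNIsCached() throws Exception {
   Grammar g = new Grammar("parser grammar T;\n" + "a : A B ;");
   ATN atn = createATN(g); // first call builds the ATN and stores it in g.atn
   assertSame(atn, createATN(g)); // later calls return the cached instance
   System.out.println(ATNSerializer.getDecoded(atn, Arrays.asList(g.getTokenNames())));
 }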
Example #10
 /**
  * Get list of all imports from all grammars in the delegate subtree of g. The grammars are in
  * import tree preorder. Don't include ourselves in list as we're not a delegate of ourselves.
  */
 public List<Grammar> getAllImportedGrammars() {
   if (importedGrammars == null) return null;
   List<Grammar> delegates = new ArrayList<Grammar>();
   for (Grammar d : importedGrammars) {
     delegates.add(d);
     List<Grammar> ds = d.getAllImportedGrammars();
     if (ds != null) delegates.addAll(ds);
   }
   return delegates;
 }
Example #11
 /**
  * Get list of all imports from all grammars in the delegate subtree of g. The grammars are in
  * import tree preorder. Don't include ourselves in list as we're not a delegate of ourselves.
  */
 public List<Grammar> getAllImportedGrammars() {
   if (importedGrammars == null) return null;
   List<Grammar> delegates = new ArrayList<Grammar>();
   for (int i = 0; i < importedGrammars.size(); i++) {
     Grammar d = importedGrammars.get(i);
     delegates.add(d);
     List<Grammar> ds = d.getAllImportedGrammars();
     if (ds != null) delegates.addAll(ds);
   }
   return delegates;
 }
Example #12
 protected void semanticProcess(Grammar g) {
   if (g.ast != null && !g.ast.hasErrors) {
     System.out.println(g.ast.toStringTree());
     Tool antlr = new Tool();
     SemanticPipeline sem = new SemanticPipeline(g);
     sem.process();
     if (g.getImportedGrammars() != null) { // process imported grammars (if any)
       for (Grammar imp : g.getImportedGrammars()) {
         antlr.processNonCombinedGrammar(imp, false);
       }
     }
   }
 }
Example #13
  void checkRuleATN(Grammar g, String ruleName, String expecting) {
    ParserATNFactory f = new ParserATNFactory(g);
    ATN atn = f.createATN();

    DOTGenerator dot = new DOTGenerator(g);
    System.out.println(dot.getDOT(atn.ruleToStartState[g.getRule(ruleName).index]));

    Rule r = g.getRule(ruleName);
    ATNState startState = atn.ruleToStartState[r.index];
    ATNPrinter serializer = new ATNPrinter(g, startState);
    String result = serializer.asString();

    // System.out.print(result);
    assertEquals(expecting, result);
  }
  @Test
  public void testWildcardStillWorks() throws Exception {
    String grammar = "parser grammar S;\n" + "a : B . C ;\n"; // not qualified ID
    mkdir(tmpdir);
    Grammar g = new Grammar(tmpdir + "/S.g4", grammar);
    g.name = "S";

    ErrorQueue equeue = new ErrorQueue();
    Tool antlr = g.tool;
    antlr.outputDirectory = tmpdir;
    antlr.libDirectory = tmpdir;
    antlr.addListener(equeue);
    antlr.process(g, true);

    assertEquals("unexpected errors: " + equeue, 0, equeue.errors.size());
  }
 public ATN createATN() {
   _createATN(g.rules.values());
   atn.maxTokenType = g.getMaxTokenType();
   addRuleFollowLinks();
   addEOFTransitionToStartRules();
   return atn;
 }
  @NotNull
  public Handle _ruleRef(@NotNull GrammarAST node) {
    Rule r = g.getRule(node.getText());
    if (r == null) {
      g.tool.errMgr.toolError(ErrorType.INTERNAL_ERROR, "Rule " + node.getText() + " undefined");
      return null;
    }
    RuleStartState start = atn.ruleToStartState[r.index];
    ATNState left = newState(node);
    ATNState right = newState(node);
     String precedenceOption =
         ((GrammarASTWithOptions) node)
             .getOptionString(LeftRecursiveRuleTransformer.PRECEDENCE_OPTION_NAME);
     int precedence = precedenceOption != null ? Integer.parseInt(precedenceOption) : 0;
    RuleTransition call = new RuleTransition(start, r.index, precedence, right);
    left.addTransition(call);

    node.atnState = left;
    return new Handle(left, right);
  }
  /**
   * From (blk)* build ( blk+ )? with *two* decisions, one for entry and one for choosing alts of
   * blk.
   *
    * <pre>
    *   |-------------|
    *   v             |
    *   o--[o-blk-o]->o  o
    *   |                ^
    *   -----------------|
    * </pre>
   *
   * <p>Note that the optional bypass must jump outside the loop as (A|B)* is not the same thing as
   * (A|B|)+.
   */
  @NotNull
  public Handle star(@NotNull GrammarAST starAST, @NotNull Handle elem) {
    StarBlockStartState blkStart = (StarBlockStartState) elem.left;
    BlockEndState blkEnd = (BlockEndState) elem.right;

    StarLoopEntryState entry = newState(StarLoopEntryState.class, starAST);
    atn.defineDecisionState(entry);
    LoopEndState end = newState(LoopEndState.class, starAST);
    StarLoopbackState loop = newState(StarLoopbackState.class, starAST);
    entry.loopBackState = loop;
    end.loopBackStateNumber = loop.stateNumber;

    BlockAST blkAST = (BlockAST) starAST.getChild(0);
    entry.isGreedy = isGreedy(blkAST);
    if (!g.isLexer() || entry.isGreedy) {
      epsilon(entry, blkStart); // loop enter edge (alt 1)
      epsilon(entry, end); // bypass loop edge (alt 2)
    } else { // only lexers flip entry/exit branches for nongreedy
      // if not greedy, priority to exit branch; make it first
      epsilon(entry, end); // bypass loop edge (alt 1)
      epsilon(entry, blkStart); // loop enter edge (alt 2)
    }
    epsilon(blkEnd, loop); // block end hits loop back
    epsilon(loop, entry); // loop back to entry/exit decision

    starAST.atnState = entry; // decision is to enter/exit; blk is its own decision
    return new Handle(entry, end);
  }
  /**
   * From (blk)+ build
   *
    * <pre>
    *    |---------|
    *    v         |
    *   [o-blk-o]->o->o
    * </pre>
   *
   * <p>We add a decision for loop back node to the existing one at blk start.
   */
  @NotNull
  public Handle plus(@NotNull GrammarAST plusAST, @NotNull Handle blk) {
    PlusBlockStartState blkStart = (PlusBlockStartState) blk.left;
    BlockEndState blkEnd = (BlockEndState) blk.right;

    PlusLoopbackState loop = newState(PlusLoopbackState.class, plusAST);
    atn.defineDecisionState(loop);
    LoopEndState end = newState(LoopEndState.class, plusAST);
    blkStart.loopBackState = loop;
    end.loopBackStateNumber = loop.stateNumber;

    plusAST.atnState = blkStart;
    epsilon(blkEnd, loop); // blk can see loop back

    BlockAST blkAST = (BlockAST) plusAST.getChild(0);
    loop.isGreedy = isGreedy(blkAST);
    if (!g.isLexer() || loop.isGreedy) {
      epsilon(loop, blkStart); // loop back to start
      epsilon(loop, end); // or exit
    } else { // only lexers flip entry/exit branches for nongreedy
      // if not greedy, priority to exit branch; make it first
      epsilon(loop, end); // exit
      epsilon(loop, blkStart); // loop back to start
    }

    return new Handle(blkStart, end);
  }
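Unlike star() above, plus() registers only the PlusLoopbackState as a new decision: the PlusBlockStartState handed in as blk.left already acts as the decision among the block's alternatives, which is what the comment about adding a decision "to the existing one at blk start" refers to.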
 /**
  * From set build single edge graph o->o-set->o. To conform to what an alt block looks like, must
  * have extra state on left. This handles ~A also, converted to ~{A} set.
  */
 public Handle set(GrammarAST associatedAST, List<GrammarAST> terminals, boolean invert) {
   ATNState left = newState(associatedAST);
   ATNState right = newState(associatedAST);
   IntervalSet set = new IntervalSet();
   for (GrammarAST t : terminals) {
     int ttype = g.getTokenType(t.getText());
     set.add(ttype);
   }
   if (invert) {
     IntervalSet notSet = set.complement(Token.MIN_TOKEN_TYPE, g.getMaxTokenType());
     left.addTransition(new NotSetTransition(right, set, notSet));
   } else {
     left.addTransition(new SetTransition(right, set));
   }
   associatedAST.atnState = left;
   return new Handle(left, right);
 }
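A standalone sketch of the invert branch: the complement is taken over the grammar's token range, here assumed to be 1..3 with A and B assumed to be token types 1 and 2; real code gets these values from g.getTokenType(...), Token.MIN_TOKEN_TYPE, and g.getMaxTokenType() as above.

 IntervalSet set = new IntervalSet();
 set.add(1); // A (assumed type)
 set.add(2); // B (assumed type)
 IntervalSet notSet = set.complement(1, 3); // every type in the assumed range 1..3 not in {A,B}
 System.out.println(notSet); // here only token type 3 remains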
  public List<String> getEvalInfoForString(String grammarString, String pattern)
      throws RecognitionException {
    ErrorQueue equeue = new ErrorQueue();
    Grammar g = new Grammar(grammarString);
    List<String> evals = new ArrayList<String>();
    if (g.ast != null && !g.ast.hasErrors) {
      SemanticPipeline sem = new SemanticPipeline(g);
      sem.process();

      ATNFactory factory = new ParserATNFactory(g);
      if (g.isLexer()) factory = new LexerATNFactory((LexerGrammar) g);
      g.atn = factory.createATN();

      CodeGenerator gen = new CodeGenerator(g);
      ST outputFileST = gen.generateParser();

      //			STViz viz = outputFileST.inspect();
      //			try {
      //				viz.waitForClose();
      //			}
      //			catch (Exception e) {
      //				e.printStackTrace();
      //			}

      boolean debug = false;
      DebugInterpreter interp =
          new DebugInterpreter(
              outputFileST.groupThatCreatedThisInstance,
              outputFileST.impl.nativeGroup.errMgr,
              debug);
      InstanceScope scope = new InstanceScope(null, outputFileST);
      StringWriter sw = new StringWriter();
      AutoIndentWriter out = new AutoIndentWriter(sw);
      interp.exec(out, scope);

      for (String e : interp.evals) {
        if (e.contains(pattern)) {
          evals.add(e);
        }
      }
    }
    if (equeue.size() > 0) {
      System.err.println(equeue.toString());
    }
    return evals;
  }
 /** From label A build Graph o-A->o */
 public Handle tokenRef(TerminalAST node) {
   ATNState left = newState(node);
   ATNState right = newState(node);
   int ttype = g.getTokenType(node.getText());
   left.addTransition(new AtomTransition(right, ttype));
   node.atnState = left;
   return new Handle(left, right);
 }
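A standalone sketch of that o-A->o shape, wiring two runtime BasicState nodes with an AtomTransition directly instead of going through newState(...); the token type 1 stands in for whatever g.getTokenType(node.getText()) returns.

 ATNState left = new BasicState();
 ATNState right = new BasicState();
 left.addTransition(new AtomTransition(right, 1)); // 1 = assumed token type for A
 // left now has a single outgoing ATOM transition whose target is right.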
Example #22
  /**
   * Get list of all imports from all grammars in the delegate subtree of g. The grammars are in
   * import tree preorder. Don't include ourselves in list as we're not a delegate of ourselves.
   */
  public List<Grammar> getAllImportedGrammars() {
    if (importedGrammars == null) {
      return null;
    }

    LinkedHashMap<String, Grammar> delegates = new LinkedHashMap<String, Grammar>();
    for (Grammar d : importedGrammars) {
      delegates.put(d.fileName, d);
      List<Grammar> ds = d.getAllImportedGrammars();
      if (ds != null) {
        for (Grammar imported : ds) {
          delegates.put(imported.fileName, imported);
        }
      }
    }

    return new ArrayList<Grammar>(delegates.values());
  }
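This variant differs from the two simpler ones above in that it keys a LinkedHashMap by fileName, so a grammar reachable through more than one import path appears only once in the result while the import-tree preorder is preserved.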
Example #23
 @Test
 public void testWildcard() throws Exception {
   Grammar g = new Grammar("parser grammar T;\n" + "tokens {A, B, C}\n" + "a : . ;");
   String expecting =
       "max type 3\n"
           + "0:RULE_START 0\n"
           + "1:RULE_STOP 0\n"
           + "2:BASIC 0\n"
           + "3:BASIC 0\n"
           + "4:BASIC 0\n"
           + "rule 0:0\n"
           + "0->2 EPSILON 0,0,0\n"
           + "2->3 WILDCARD 0,0,0\n"
           + "3->1 EPSILON 0,0,0\n";
   ATN atn = createATN(g, true);
   String result = ATNSerializer.getDecoded(atn, Arrays.asList(g.getTokenNames()));
   assertEquals(expecting, result);
 }
Example #24
 List<Integer> getTypesFromString(Grammar g, String expecting) {
   List<Integer> expectingTokenTypes = new ArrayList<Integer>();
   if (expecting != null && !expecting.trim().equals("")) {
     for (String tname : expecting.replace(" ", "").split(",")) {
       int ttype = g.getTokenType(tname);
       expectingTokenTypes.add(ttype);
     }
   }
   return expectingTokenTypes;
 }
Example #25
 IntegerList getTypesFromString(Grammar g, String expecting) {
   IntegerList expectingTokenTypes = new IntegerList();
   if (expecting != null && !expecting.trim().isEmpty()) {
     for (String tname : expecting.replace(" ", "").split(",")) {
       int ttype = g.getTokenType(tname);
       expectingTokenTypes.add(ttype);
     }
   }
   return expectingTokenTypes;
 }
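A hypothetical call to this helper against the tokens {A, B, C} grammar from testWildcard above, again inside a method declared throws Exception; the resulting integers depend on the token types the tool assigns.

 Grammar g = new Grammar("parser grammar T;\n" + "tokens {A, B, C}\n" + "a : . ;");
 IntegerList types = getTypesFromString(g, "A, C"); // spaces are stripped before splitting
 // types now holds g.getTokenType("A") followed by g.getTokenType("C").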
Example #26
 List<ANTLRMessage> checkRuleDFA(String gtext, String ruleName, String expecting)
     throws Exception {
   ErrorQueue equeue = new ErrorQueue();
   Grammar g = new Grammar(gtext, equeue);
   ATN atn = createATN(g);
   ATNState s = atn.ruleToStartState[g.getRule(ruleName).index];
   if (s == null) {
     System.err.println("no such rule: " + ruleName);
     return null;
   }
   ATNState t = s.transition(0).target;
   if (!(t instanceof DecisionState)) {
     System.out.println(ruleName + " has no decision");
     return null;
   }
   DecisionState blk = (DecisionState) t;
   checkRuleDFA(g, blk, expecting);
   return equeue.all;
 }
Example #27
 @Test
 public void testEOFInSet() throws Exception {
   Grammar g = new Grammar("parser grammar T;\n" + "a : (A|EOF) ;");
   String expecting =
       "max type 1\n"
           + "0:RULE_START 0\n"
           + "1:RULE_STOP 0\n"
           + "2:BASIC 0\n"
           + "3:BASIC 0\n"
           + "4:BASIC 0\n"
           + "rule 0:0\n"
           + "0:EOF, A..A\n"
           + "0->2 EPSILON 0,0,0\n"
           + "2->3 SET 0,0,0\n"
           + "3->1 EPSILON 0,0,0\n";
   ATN atn = createATN(g, true);
   String result = ATNSerializer.getDecoded(atn, Arrays.asList(g.getTokenNames()));
   assertEquals(expecting, result);
 }
  private void processRules(
      DocumentSnapshot snapshot,
      CompiledFileModelV4 result,
      Collection<? extends GrammarAST> rules,
      Collection<Description> parserRules,
      Collection<Description> lexerRules) {
    for (GrammarAST child : rules) {
      if (child.getChild(0) instanceof GrammarASTErrorNode) {
        continue;
      }

      if (((GrammarAST) child.getChild(0)).g != result.getGrammar()) {
        continue;
      }

      String ruleName = child.getChild(0).getText();
      if ("Tokens".equals(ruleName)) {
        continue;
      }

      DeclarationKind declarationKind;
      if (Grammar.isTokenName(ruleName)) {
        declarationKind = DeclarationKind.LEXER_RULE;
      } else {
        declarationKind = DeclarationKind.PARSER_RULE;
      }

      GrammarNode.GrammarNodeDescription ruleDescription =
          new GrammarNode.GrammarNodeDescription(declarationKind, ruleName);
      ruleDescription.setOffset(snapshot, result.getFileObject(), getElementOffset(child));
      ruleDescription.setSpan(getSpan(snapshot, result, child));
       // for now, go on the fact that snapshots aren't available for imported files
       ruleDescription.setInherited(snapshot == null);

      if (Grammar.isTokenName(ruleName)) {
        lexerRules.add(ruleDescription);
      } else {
        parserRules.add(ruleDescription);
      }
    }
  }
Example #29
 @Test
 public void testSimpleNoBlock() throws Exception {
   Grammar g = new Grammar("parser grammar T;\n" + "a : A B ;");
   String expecting =
       "max type 2\n"
           + "0:RULE_START 0\n"
           + "1:RULE_STOP 0\n"
           + "2:BASIC 0\n"
           + "3:BASIC 0\n"
           + "4:BASIC 0\n"
           + "5:BASIC 0\n"
           + "rule 0:0\n"
           + "0->2 EPSILON 0,0,0\n"
           + "2->3 ATOM 1,0,0\n"
           + "3->4 ATOM 2,0,0\n"
           + "4->1 EPSILON 0,0,0\n";
   ATN atn = createATN(g, true);
   String result = ATNSerializer.getDecoded(atn, Arrays.asList(g.getTokenNames()));
   assertEquals(expecting, result);
 }
  public ParserATNFactory(@NotNull Grammar g) {
    if (g == null) {
      throw new NullPointerException("g");
    }

    this.g = g;

    ATNType atnType = g instanceof LexerGrammar ? ATNType.LEXER : ATNType.PARSER;
    int maxTokenType = g.getMaxTokenType();
    this.atn = new ATN(atnType, maxTokenType);
  }