public final void mSTRING(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = STRING;
    int _saveIndex;

    _saveIndex = text.length();
    mQUOTE(false);
    text.setLength(_saveIndex);
    {
      _loop28:
      do {
        if ((_tokenSet_1.member(LA(1)))) {
          matchNot('"');
        } else {
          break _loop28;
        }

      } while (true);
    }
    _saveIndex = text.length();
    mQUOTE(false);
    text.setLength(_saveIndex);
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
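The two _saveIndex / text.setLength(...) pairs around mQUOTE discard the quote characters from the token text, so the resulting STRING token contains only the characters between the quotes. A minimal hand-written sketch of that behavior follows; the class and method names are illustrative only and not part of the generated lexer.

// Illustrative sketch only: scan a double-quoted string starting at pos and
// return its contents without the surrounding quotes, mirroring what the
// generated mSTRING rule above leaves in the token text.
final class StringScanSketch {
  static String scanString(String input, int pos) {
    if (pos >= input.length() || input.charAt(pos) != '"') {
      throw new IllegalArgumentException("expected opening quote at index " + pos);
    }
    int end = input.indexOf('"', pos + 1);
    if (end < 0) {
      throw new IllegalArgumentException("unterminated string starting at index " + pos);
    }
    return input.substring(pos + 1, end); // token text, quotes stripped
  }

  public static void main(String[] args) {
    System.out.println(scanString("\"hello\" rest", 0)); // prints: hello
  }
}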
Example #2
 /**
  * Overrides the base behavior to retry keywords as identifiers.
  *
  * @param token The token.
  * @param ex The recognition exception.
  * @return AST - The new AST.
  * @throws antlr.RecognitionException if the substitution was not possible.
  * @throws antlr.TokenStreamException if the substitution was not possible.
  */
 public AST handleIdentifierError(Token token, RecognitionException ex)
     throws RecognitionException, TokenStreamException {
   // If the token can tell us if it could be an identifier...
   if (token instanceof HqlToken) {
     HqlToken hqlToken = (HqlToken) token;
      // ... and the token could be an identifier and the error is
     // a mismatched token error ...
     if (hqlToken.isPossibleID() && (ex instanceof MismatchedTokenException)) {
       MismatchedTokenException mte = (MismatchedTokenException) ex;
       // ... and the expected token type was an identifier, then:
       if (mte.expecting == HqlTokenTypes.IDENT) {
         // Use the token as an identifier.
         reportWarning(
             "Keyword  '"
                 + token.getText()
                 + "' is being interpreted as an identifier due to: "
                 + mte.getMessage());
         // Add the token to the AST.
         ASTPair currentAST = new ASTPair();
         token.setType(HqlTokenTypes.WEIRD_IDENT);
         astFactory.addASTChild(currentAST, astFactory.create(token));
         consume();
         AST identifierAST = currentAST.root;
         return identifierAST;
       }
     } // if
   } // if
   // Otherwise, handle the error normally.
   return super.handleIdentifierError(token, ex);
 }
Example #3
  public final void mINT(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = INT;
    int _saveIndex;

    {
      int _cnt23 = 0;
      _loop23:
      do {
        if (((LA(1) >= '0' && LA(1) <= '9'))) {
          matchRange('0', '9');
        } else {
          if (_cnt23 >= 1) {
            break _loop23;
          } else {
            throw new NoViableAltForCharException(
                (char) LA(1), getFilename(), getLine(), getColumn());
          }
        }

        _cnt23++;
      } while (true);
    }
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
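The _cnt23 counter implements ANTLR's one-or-more loop: when the next character is not a digit, the loop exits if at least one digit was already matched and throws otherwise. A minimal hand-written sketch of that control flow, with illustrative names that are not part of the generated lexer:

// Illustrative sketch only: consume one or more ASCII digits starting at pos
// and return the index just past them, failing if no digit is present --
// the same behavior as the generated mINT loop above.
final class IntScanSketch {
  static int scanInt(String input, int pos) {
    int i = pos;
    while (i < input.length() && input.charAt(i) >= '0' && input.charAt(i) <= '9') {
      i++;
    }
    if (i == pos) {
      throw new IllegalArgumentException("expected at least one digit at index " + pos);
    }
    return i;
  }

  public static void main(String[] args) {
    System.out.println(scanInt("1234abc", 0)); // prints: 4
  }
}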
  public final void mFIELDTYPE(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = FIELDTYPE;
    int _saveIndex;

    {
      int _cnt33 = 0;
      _loop33:
      do {
        if ((_tokenSet_0.member(LA(1)))) {
          mLETTER(false);
        } else {
          if (_cnt33 >= 1) {
            break _loop33;
          } else {
            throw new NoViableAltForCharException(LA(1), getFilename(), getLine(), getColumn());
          }
        }

        _cnt33++;
      } while (true);
    }
    _ttype = testLiteralsTable(_ttype);
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
  public final void arrayVals() throws RecognitionException, TokenStreamException {

    Token f = null;
    Token r = null;

    f = LT(1);
    match(STRING_LITERAL);
    if (inputState.guessing == 0) {
      first(f.getText());
    }
    {
      _loop14:
      do {
        if ((LA(1) == COMMA)) {
          match(COMMA);
          r = LT(1);
          match(STRING_LITERAL);
          if (inputState.guessing == 0) {
            rest(r.getText());
          }
        } else {
          break _loop14;
        }

      } while (true);
    }
  }
  public final void mWS(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = WS;

    int _saveIndex;

    {
      switch (LA(1)) {
        case ' ':
          {
            match(' ');
            break;
          }
        case '\t':
          {
            match('\t');
            break;
          }
        default:
          {
            throw new NoViableAltForCharException(LA(1), getFilename(), getLine(), getColumn());
          }
      }
    }
    _ttype = Token.SKIP;
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
  public final void keyDef() throws RecognitionException, TokenStreamException {

    Token s = null;
    Token i = null;

    {
      switch (LA(1)) {
        case STRING_LITERAL:
          {
            s = LT(1);
            match(STRING_LITERAL);
            if (inputState.guessing == 0) {
              key(s.getText());
            }
            break;
          }
        case IDENT:
          {
            i = LT(1);
            match(IDENT);
            if (inputState.guessing == 0) {
              key(i.getText());
            }
            break;
          }
        default:
          {
            throw new NoViableAltException(LT(1), getFilename());
          }
      }
    }
    match(DOUBLEDOT);
  }
 public String getDocumentation() {
   String result = "";
   Token t = getHiddenBefore();
   if (t != null && t.getType() == DataScriptParserTokenTypes.DOC) {
     result = t.getText();
   }
   return result;
 }
Example #9
  protected final void mHEX_DIGIT(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = HEX_DIGIT;
    int _saveIndex;

    {
      switch (LA(1)) {
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
          {
            matchRange('0', '9');
            break;
          }
        case 'A':
        case 'B':
        case 'C':
        case 'D':
        case 'E':
        case 'F':
          {
            matchRange('A', 'F');
            break;
          }
        case 'a':
        case 'b':
        case 'c':
        case 'd':
        case 'e':
        case 'f':
          {
            matchRange('a', 'f');
            break;
          }
        default:
          {
            throw new NoViableAltForCharException(
                (char) LA(1), getFilename(), getLine(), getColumn());
          }
      }
    }
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #10
 protected void showTokenStreamOf(String text) throws CharStreamException, TokenStreamException {
   Reader input = new StringReader(text);
   LexerImpl lexer = new LexerImpl(input);
   Token t = lexer.nextToken();
   while (t.getType() != Token.EOF_TYPE) {
     System.out.println(t.toString());
     t = lexer.nextToken();
   }
   System.out.println("<END OF STREAM>");
 }
  public final void valueDef() throws RecognitionException, TokenStreamException {

    Token s = null;

    s = LT(1);
    match(STRING_LITERAL);
    if (inputState.guessing == 0) {
      value(s.getText());
    }
  }
Example #12
  public final void mSL_COMMENT(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = SL_COMMENT;
    int _saveIndex;

    match("#");
    {
      _loop56:
      do {
        if ((_tokenSet_0.member(LA(1)))) {
          {
            match(_tokenSet_0);
          }
        } else {
          break _loop56;
        }

      } while (true);
    }
    {
      switch (LA(1)) {
        case '\n':
          {
            match('\n');
            break;
          }
        case '\r':
          {
            match('\r');
            {
              if ((LA(1) == '\n')) {
                match('\n');
              } else {
              }
            }
            break;
          }
        default:
          {
            throw new NoViableAltForCharException(
                (char) LA(1), getFilename(), getLine(), getColumn());
          }
      }
    }
    _ttype = Token.SKIP;
    newline();
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #13
  public final void mWS(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = WS;
    int _saveIndex;

    {
      int _cnt14 = 0;
      _loop14:
      do {
        switch (LA(1)) {
          case ' ':
            {
              match(' ');
              break;
            }
          case '\t':
            {
              match('\t');
              break;
            }
          case '\n':
            {
              match('\n');
              newline();
              break;
            }
          case '\r':
            {
              match('\r');
              break;
            }
          default:
            {
              if (_cnt14 >= 1) {
                break _loop14;
              } else {
                throw new NoViableAltForCharException(
                    (char) LA(1), getFilename(), getLine(), getColumn());
              }
            }
        }
        _cnt14++;
      } while (true);
    }
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #14
  protected final void mINTEGER(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = INTEGER;
    int _saveIndex;

    switch (LA(1)) {
      case '0':
        {
          mZERO(false);
          break;
        }
      case '1':
      case '2':
      case '3':
      case '4':
      case '5':
      case '6':
      case '7':
      case '8':
      case '9':
        {
          mNONZERO(false);
          {
            _loop38:
            do {
              if (((LA(1) >= '0' && LA(1) <= '9'))) {
                mDIGIT(false);
              } else {
                break _loop38;
              }

            } while (true);
          }
          break;
        }
      default:
        {
          throw new NoViableAltForCharException(
              (char) LA(1), getFilename(), getLine(), getColumn());
        }
    }
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #15
  /**
   * Create block comment from token.
   *
   * @param token Token object.
   * @return DetailAST with BLOCK_COMMENT type.
   */
  private static DetailAST createBlockCommentNode(Token token) {
    final DetailAST blockComment = new DetailAST();
    blockComment.initialize(TokenTypes.BLOCK_COMMENT_BEGIN, "/*");

    // column counting begins from 0
    blockComment.setColumnNo(token.getColumn() - 1);
    blockComment.setLineNo(token.getLine());

    final DetailAST blockCommentContent = new DetailAST();
    blockCommentContent.initialize(token);
    blockCommentContent.setType(TokenTypes.COMMENT_CONTENT);

    // column counting begins from 0
    // plus length of '/*'
    blockCommentContent.setColumnNo(token.getColumn() - 1 + 2);
    blockCommentContent.setLineNo(token.getLine());
    blockCommentContent.setText(token.getText());

    final DetailAST blockCommentClose = new DetailAST();
    blockCommentClose.initialize(TokenTypes.BLOCK_COMMENT_END, "*/");

    final Entry<Integer, Integer> linesColumns =
        countLinesColumns(token.getText(), token.getLine(), token.getColumn());
    blockCommentClose.setLineNo(linesColumns.getKey());
    blockCommentClose.setColumnNo(linesColumns.getValue());

    blockComment.addChild(blockCommentContent);
    blockComment.addChild(blockCommentClose);
    return blockComment;
  }
Example #16
  protected final void mEXPONENT(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = EXPONENT;
    int _saveIndex;

    {
      match('e');
    }
    {
      switch (LA(1)) {
        case '+':
          {
            match('+');
            break;
          }
        case '-':
          {
            match('-');
            break;
          }
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
          {
            break;
          }
        default:
          {
            throw new NoViableAltForCharException(
                (char) LA(1), getFilename(), getLine(), getColumn());
          }
      }
    }
    mINTEGER(false);
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #17
  protected final void mNONZERO(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = NONZERO;
    int _saveIndex;

    matchRange('1', '9');
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #18
  public final void mCONJ(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = CONJ;
    int _saveIndex;

    match("CONJ");
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #19
 /**
  * Create comment AST from token. Depending on token type SINGLE_LINE_COMMENT or
  * BLOCK_COMMENT_BEGIN is created.
  *
  * @param token Token object.
  * @return DetailAST of comment node.
  */
 private static DetailAST createCommentAstFromToken(Token token) {
   if (token.getType() == TokenTypes.SINGLE_LINE_COMMENT) {
     return createSlCommentNode(token);
   } else {
     return createBlockCommentNode(token);
   }
 }
Example #20
  public final String htmlDocument() throws RecognitionException, TokenStreamException {
    String charset;

    Token token1 = null;
    Token token2 = null;
    charset = null;

    try { // for error handling
      switch (LA(1)) {
        case META_CONTENT_TYPE:
          {
            {
              token1 = LT(1);
              match(META_CONTENT_TYPE);
            }
            charset = token1.getText();
            break;
          }
        case XML_ENCODING_DECL:
          {
            {
              token2 = LT(1);
              match(XML_ENCODING_DECL);
            }
            charset = token2.getText();
            break;
          }
        case EOF:
          {
            charset = null;
            break;
          }
        default:
          {
            throw new NoViableAltException(LT(1), getFilename());
          }
      }
    } catch (RecognitionException ex) {
      reportError(ex);
      consume();
      consumeUntil(_tokenSet_0);
    }
    return charset;
  }
Example #21
File: Main.java  Project: yurius-r/decaf
  public static void main(String[] args) {
    try {
      CLI.parse(args, new String[0]);

      InputStream inputStream =
          args.length == 0 ? System.in : new java.io.FileInputStream(CLI.infile);

      if (CLI.target == CLI.SCAN) {
        DecafScanner lexer = new DecafScanner(new DataInputStream(inputStream));
        Token token;
        boolean done = false;
        while (!done) {
          try {
            for (token = lexer.nextToken();
                token.getType() != DecafParserTokenTypes.EOF;
                token = lexer.nextToken()) {
              String type = "";
              String text = token.getText();

              switch (token.getType()) {
                case DecafScannerTokenTypes.ID:
                  type = " IDENTIFIER";
                  break;
              }
              System.out.println(token.getLine() + type + " " + text);
            }
            done = true;
          } catch (Exception e) {
            // print the error:
            System.out.println(CLI.infile + " " + e);
            lexer.consume();
          }
        }
      } else if (CLI.target == CLI.PARSE || CLI.target == CLI.DEFAULT) {
        DecafScanner lexer = new DecafScanner(new DataInputStream(inputStream));
        DecafParser parser = new DecafParser(lexer);
        parser.program();
      }

    } catch (Exception e) {
      // print the error:
      System.out.println(CLI.infile + " " + e);
    }
  }
  protected final void mLETTER(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = LETTER;
    int _saveIndex;

    {
      match(_tokenSet_0);
    }
    _ttype =
        testLiteralsTable(new String(text.getBuffer(), _begin, text.length() - _begin), _ttype);
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #23
  public final String record() throws RecognitionException, TokenStreamException {
    String rec;

    Token r = null;

    rec = null;

    try { // for error handling
      {
        {
          r = LT(1);
          match(RECORD);

          rec = r.getText();
        }
        {
          switch (LA(1)) {
            case COMMA:
              {
                match(COMMA);
                break;
              }
            case EOF:
            case NEWLINE:
            case RECORD:
              {
                break;
              }
            default:
              {
                throw new NoViableAltException(LT(1), getFilename());
              }
          }
        }
      }
    } catch (RecognitionException ex) {
      reportError(ex);
      recover(ex, _tokenSet_2);
    }
    return rec;
  }
Example #24
  public final void mSTRING(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = STRING;
    int _saveIndex;
    Token escaped = null;
    char normal = '\0';
    StringBuilder lBuf = new StringBuilder();

    match('"');
    {
      _loop26:
      do {
        if ((LA(1) == '\\')) {
          mESC(true);
          escaped = _returnToken;
          lBuf.append(escaped.getText());
        } else if ((_tokenSet_1.member(LA(1)))) {
          {
            normal = LA(1);
            match(_tokenSet_1);
          }
          lBuf.append(normal);
        } else {
          break _loop26;
        }

      } while (true);
    }
    match('"');
    text.setLength(_begin);
    text.append(lBuf.toString());
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #25
  /**
   * Create single-line comment from token.
   *
   * @param token Token object.
   * @return DetailAST with SINGLE_LINE_COMMENT type.
   */
  private static DetailAST createSlCommentNode(Token token) {
    final DetailAST slComment = new DetailAST();
    slComment.setType(TokenTypes.SINGLE_LINE_COMMENT);
    slComment.setText("//");

    // column counting begins from 0
    slComment.setColumnNo(token.getColumn() - 1);
    slComment.setLineNo(token.getLine());

    final DetailAST slCommentContent = new DetailAST();
    slCommentContent.initialize(token);
    slCommentContent.setType(TokenTypes.COMMENT_CONTENT);

    // column counting begins from 0
    // plus length of '//'
    slCommentContent.setColumnNo(token.getColumn() - 1 + 2);
    slCommentContent.setLineNo(token.getLine());
    slCommentContent.setText(token.getText());

    slComment.addChild(slCommentContent);
    return slComment;
  }
Example #26
  public static void main(String[] args) {
    try {
      CLI.parse(args, new String[0]);

      InputStream inputStream =
          args.length == 0 ? System.in : new java.io.FileInputStream(CLI.infile);

      if (CLI.target == CLI.SCAN) {
        DecafScanner lexer = new DecafScanner(new DataInputStream(inputStream));
        Token token;
        boolean done = false;
        while (!done) {
          try {
            for (token = lexer.nextToken();
                token.getType() != DecafParserTokenTypes.EOF;
                token = lexer.nextToken()) {
              String type = "";
              String text = token.getText();

              switch (token.getType()) {
                case DecafScannerTokenTypes.ID:
                  type = " IDENTIFIER";
                  break;
                case DecafScannerTokenTypes.CHAR:
                  type = " CHARLITERAL";
                  break;
                case DecafScannerTokenTypes.TRUE:
                case DecafScannerTokenTypes.FALSE:
                  type = " BOOLEANLITERAL";
                  break;
                case DecafScannerTokenTypes.HEX:
                case DecafScannerTokenTypes.DECIMAL:
                  type = " INTLITERAL";
                  break;
                case DecafScannerTokenTypes.STRING:
                  type = " STRINGLITERAL";
                  break;
              }
              System.out.println(token.getLine() + type + " " + text);
            }
            done = true;
          } catch (Exception e) {
            // print the error:
            System.out.println(CLI.infile + " " + e);
            lexer.consume();
          }
        }
      } else if (CLI.target == CLI.PARSE || CLI.target == CLI.DEFAULT) {
        DecafScanner lexer = new DecafScanner(new DataInputStream(inputStream));
        DecafParser parser = new DecafParser(lexer, CLI.debug);
        // DecafParser parser = new DecafParser (lexer);
        parser.program();
      } else if (CLI.target == CLI.INTER) {
        DecafScanner lexer = new DecafScanner(new DataInputStream(inputStream));
        DecafParser parser = new DecafParser(lexer, CLI.debug);
        parser.program();
        IrNode irRoot = parser.getIrTree();
        SemanticChecker checker = new SemanticChecker(CLI.infile, CLI.debug);
        if (CLI.debug) System.out.println("--- checking -----");
        checker.checkProgram((IrClassDecl) irRoot);
      } else if (CLI.target == CLI.LOWIR) {
        DecafScanner lexer = new DecafScanner(new DataInputStream(inputStream));
        DecafParser parser = new DecafParser(lexer, CLI.debug);
        parser.program();
        IrNode irRoot = parser.getIrTree();
        SemanticChecker checker = new SemanticChecker(CLI.infile, CLI.debug);
        if (CLI.debug) System.out.println("--- checking -----");
        checker.checkProgram((IrClassDecl) irRoot);
        CodeGen codegen = new CodeGen(irRoot, CLI.debug);
        codegen.genLowIr();
        codegen.printLowIr();
      }
    } catch (Exception e) {
      // print the error:
      System.out.println(CLI.infile + " " + e);
    }
  }
Example #27
 @Override
 public void initialize(Token tok) {
   super.initialize(tok);
   lineNo = tok.getLine();
   columnNo = tok.getColumn() - 1; // expect columns to start @ 0
 }
Example #28
  public final void mID(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = ID;
    int _saveIndex;

    {
      int _cnt26 = 0;
      _loop26:
      do {
        switch (LA(1)) {
          case 'a':
          case 'b':
          case 'c':
          case 'd':
          case 'e':
          case 'f':
          case 'g':
          case 'h':
          case 'i':
          case 'j':
          case 'k':
          case 'l':
          case 'm':
          case 'n':
          case 'o':
          case 'p':
          case 'q':
          case 'r':
          case 's':
          case 't':
          case 'u':
          case 'v':
          case 'w':
          case 'x':
          case 'y':
          case 'z':
            {
              matchRange('a', 'z');
              break;
            }
          case 'A':
          case 'B':
          case 'C':
          case 'D':
          case 'E':
          case 'F':
          case 'G':
          case 'H':
          case 'I':
          case 'J':
          case 'K':
          case 'L':
          case 'M':
          case 'N':
          case 'O':
          case 'P':
          case 'Q':
          case 'R':
          case 'S':
          case 'T':
          case 'U':
          case 'V':
          case 'W':
          case 'X':
          case 'Y':
          case 'Z':
            {
              matchRange('A', 'Z');
              break;
            }
          default:
            {
              if (_cnt26 >= 1) {
                break _loop26;
              } else {
                throw new NoViableAltForCharException(
                    (char) LA(1), getFilename(), getLine(), getColumn());
              }
            }
        }
        _cnt26++;
      } while (true);
    }
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
Example #29
  public final void mNUMBER(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = NUMBER;
    int _saveIndex;

    {
      switch (LA(1)) {
        case '-':
          {
            match('-');
            break;
          }
        case '0':
        case '1':
        case '2':
        case '3':
        case '4':
        case '5':
        case '6':
        case '7':
        case '8':
        case '9':
          {
            break;
          }
        default:
          {
            throw new NoViableAltForCharException(
                (char) LA(1), getFilename(), getLine(), getColumn());
          }
      }
    }
    mINTEGER(false);
    {
      if ((LA(1) == 'e')) {
        mEXPONENT(false);
      } else {
        {
          if ((LA(1) == '.')) {
            match('.');
            {
              int _cnt44 = 0;
              _loop44:
              do {
                if (((LA(1) >= '0' && LA(1) <= '9'))) {
                  mDIGIT(false);
                } else {
                  if (_cnt44 >= 1) {
                    break _loop44;
                  } else {
                    throw new NoViableAltForCharException(
                        (char) LA(1), getFilename(), getLine(), getColumn());
                  }
                }

                _cnt44++;
              } while (true);
            }
            {
              if ((LA(1) == 'e')) {
                mEXPONENT(false);
              } else {
              }
            }
          } else {
          }
        }
      }
    }
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
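Taken together with mINTEGER, mNONZERO, and mEXPONENT above, mNUMBER accepts an optional minus sign, an integer with no leading zeros, and then either an exponent or a fraction with an optional exponent. As a cross-check only (this regular expression is an illustration, not part of the grammar), the same shapes can be described as:

// Illustrative only: a regular expression accepting the same number shapes as
// the generated mNUMBER rule above (optional '-', integer without leading
// zeros, then either an exponent or a fraction with an optional exponent).
import java.util.regex.Pattern;

final class NumberShapeSketch {
  static final Pattern NUMBER = Pattern.compile(
      "-?(?:0|[1-9][0-9]*)(?:e[+-]?(?:0|[1-9][0-9]*)|\\.[0-9]+(?:e[+-]?(?:0|[1-9][0-9]*))?)?");

  public static void main(String[] args) {
    System.out.println(NUMBER.matcher("-12.5e+3").matches()); // true
    System.out.println(NUMBER.matcher("01").matches());       // false: leading zero
  }
}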
Example #30
  protected final void mESC(boolean _createToken)
      throws RecognitionException, CharStreamException, TokenStreamException {
    int _ttype;
    Token _token = null;
    int _begin = text.length();
    _ttype = ESC;
    int _saveIndex;
    Token i = null;
    Token j = null;
    Token k = null;
    Token l = null;

    match('\\');
    {
      switch (LA(1)) {
        case 'n':
          {
            match('n');
            text.setLength(_begin);
            text.append("\n");
            break;
          }
        case 'r':
          {
            match('r');
            text.setLength(_begin);
            text.append("\r");
            break;
          }
        case 't':
          {
            match('t');
            text.setLength(_begin);
            text.append("\t");
            break;
          }
        case 'b':
          {
            match('b');
            text.setLength(_begin);
            text.append("\b");
            break;
          }
        case 'f':
          {
            match('f');
            text.setLength(_begin);
            text.append("\f");
            break;
          }
        case '"':
          {
            match('"');
            text.setLength(_begin);
            text.append("\"");
            break;
          }
        case '\'':
          {
            match('\'');
            text.setLength(_begin);
            text.append("\'");
            break;
          }
        case '/':
          {
            match('/');
            text.setLength(_begin);
            text.append("/");
            break;
          }
        case '\\':
          {
            match('\\');
            text.setLength(_begin);
            text.append("\\");
            break;
          }
        case 'u':
          {
            {
              int _cnt30 = 0;
              _loop30:
              do {
                if ((LA(1) == 'u')) {
                  match('u');
                } else {
                  if (_cnt30 >= 1) {
                    break _loop30;
                  } else {
                    throw new NoViableAltForCharException(
                        (char) LA(1), getFilename(), getLine(), getColumn());
                  }
                }

                _cnt30++;
              } while (true);
            }
            mHEX_DIGIT(true);
            i = _returnToken;
            mHEX_DIGIT(true);
            j = _returnToken;
            mHEX_DIGIT(true);
            k = _returnToken;
            mHEX_DIGIT(true);
            l = _returnToken;
            text.setLength(_begin);
            text.append(ParserUtil.hexToChar(i.getText(), j.getText(), k.getText(), l.getText()));
            break;
          }
        default:
          {
            throw new NoViableAltForCharException(
                (char) LA(1), getFilename(), getLine(), getColumn());
          }
      }
    }
    if (_createToken && _token == null && _ttype != Token.SKIP) {
      _token = makeToken(_ttype);
      _token.setText(new String(text.getBuffer(), _begin, text.length() - _begin));
    }
    _returnToken = _token;
  }
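ParserUtil.hexToChar is not shown in these snippets; the unicode-escape branch above is assumed to combine the texts of the four HEX_DIGIT tokens into a single character. A minimal stand-in with that assumed behavior (illustrative only, not the actual ParserUtil implementation):

final class HexEscapeSketch {
  // Illustrative stand-in for ParserUtil.hexToChar (assumed behavior only):
  // concatenate four single hex digits and decode them as one UTF-16 code unit.
  static char hexToChar(String d1, String d2, String d3, String d4) {
    return (char) Integer.parseInt(d1 + d2 + d3 + d4, 16);
  }

  public static void main(String[] args) {
    System.out.println(hexToChar("0", "0", "4", "1")); // prints: A
  }
}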