// Generated ANTLR lexer entry point: dispatches on the lookahead character to the
// token rules mWS, mUE, mIE, mRW and mCONJ, skipping tokens marked SKIP.
public Token nextToken() throws TokenStreamException {
    Token theRetToken = null;
    tryAgain:
    for (;;) {
        Token _token = null;
        int _ttype = Token.INVALID_TYPE;
        resetText();
        try { // for char stream error handling
            try { // for lexical error handling
                switch (LA(1)) {
                    case '\t': case '\n': case '\r': case ' ': {
                        mWS(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case 'U': {
                        mUE(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case 'I': {
                        mIE(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case 'R': {
                        mRW(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case 'C': {
                        mCONJ(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    default: {
                        if (LA(1) == EOF_CHAR) {
                            uponEOF();
                            _returnToken = makeToken(Token.EOF_TYPE);
                        } else {
                            throw new NoViableAltForCharException(
                                    (char) LA(1), getFilename(), getLine(), getColumn());
                        }
                    }
                }
                if (_returnToken == null) continue tryAgain; // found SKIP token
                _ttype = _returnToken.getType();
                _ttype = testLiteralsTable(_ttype);
                _returnToken.setType(_ttype);
                return _returnToken;
            } catch (RecognitionException e) {
                throw new TokenStreamRecognitionException(e);
            }
        } catch (CharStreamException cse) {
            if (cse instanceof CharStreamIOException) {
                throw new TokenStreamIOException(((CharStreamIOException) cse).io);
            } else {
                throw new TokenStreamException(cse.getMessage());
            }
        }
    }
}
// Generated ANTLR lexer entry point for an expression grammar: dispatches on the
// lookahead character to mWS, mEQUALS, mLPAREN, mRPAREN, mSTAR, mPLUS, mSEMI, mINT and mID.
public Token nextToken() throws TokenStreamException {
    Token theRetToken = null;
    tryAgain:
    for (;;) {
        Token _token = null;
        int _ttype = Token.INVALID_TYPE;
        resetText();
        try { // for char stream error handling
            try { // for lexical error handling
                switch (LA(1)) {
                    case '\t': case '\n': case '\r': case ' ': {
                        mWS(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '=': {
                        mEQUALS(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '(': {
                        mLPAREN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ')': {
                        mRPAREN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '*': {
                        mSTAR(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '+': {
                        mPLUS(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ';': {
                        mSEMI(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '0': case '1': case '2': case '3': case '4':
                    case '5': case '6': case '7': case '8': case '9': {
                        mINT(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case 'A': case 'B': case 'C': case 'D': case 'E': case 'F': case 'G':
                    case 'H': case 'I': case 'J': case 'K': case 'L': case 'M': case 'N':
                    case 'O': case 'P': case 'Q': case 'R': case 'S': case 'T': case 'U':
                    case 'V': case 'W': case 'X': case 'Y': case 'Z':
                    case 'a': case 'b': case 'c': case 'd': case 'e': case 'f': case 'g':
                    case 'h': case 'i': case 'j': case 'k': case 'l': case 'm': case 'n':
                    case 'o': case 'p': case 'q': case 'r': case 's': case 't': case 'u':
                    case 'v': case 'w': case 'x': case 'y': case 'z': {
                        mID(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    default: {
                        if (LA(1) == EOF_CHAR) {
                            uponEOF();
                            _returnToken = makeToken(Token.EOF_TYPE);
                        } else {
                            throw new NoViableAltForCharException(
                                    (char) LA(1), getFilename(), getLine(), getColumn());
                        }
                    }
                }
                if (_returnToken == null) continue tryAgain; // found SKIP token
                _ttype = _returnToken.getType();
                _ttype = testLiteralsTable(_ttype);
                _returnToken.setType(_ttype);
                return _returnToken;
            } catch (RecognitionException e) {
                throw new TokenStreamRecognitionException(e);
            }
        } catch (CharStreamException cse) {
            if (cse instanceof CharStreamIOException) {
                throw new TokenStreamIOException(((CharStreamIOException) cse).io);
            } else {
                throw new TokenStreamException(cse.getMessage());
            }
        }
    }
}
// Generated ANTLR lexer entry point for a JSON-style grammar: single-character
// punctuation, the literals true/false/null, numbers, whitespace, '#' comments,
// and a second lookahead character to distinguish STRING from QUOTES.
public Token nextToken() throws TokenStreamException {
    Token theRetToken = null;
    tryAgain:
    for (;;) {
        Token _token = null;
        int _ttype = Token.INVALID_TYPE;
        resetText();
        try { // for char stream error handling
            try { // for lexical error handling
                switch (LA(1)) {
                    case '(': {
                        mLPAREN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ')': {
                        mRPAREN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '{': {
                        mLCURLY(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '}': {
                        mRCURLY(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '[': {
                        mLBRACK(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ']': {
                        mRBRACK(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ',': {
                        mCOMMA(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ':': {
                        mCOLON(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case 't': {
                        mTRUE(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case 'f': {
                        mFALSE(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case 'n': {
                        mNULL(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '-':
                    case '0': case '1': case '2': case '3': case '4':
                    case '5': case '6': case '7': case '8': case '9': {
                        mNUMBER(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '\t': case '\n': case '\u000c': case '\r': case ' ': {
                        mWS(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '#': {
                        mSL_COMMENT(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    default:
                        if ((LA(1) == '"') && (_tokenSet_0.member(LA(2)))) {
                            mSTRING(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '"') && (true)) {
                            mQUOTES(true);
                            theRetToken = _returnToken;
                        } else {
                            if (LA(1) == EOF_CHAR) {
                                uponEOF();
                                _returnToken = makeToken(Token.EOF_TYPE);
                            } else {
                                throw new NoViableAltForCharException(
                                        (char) LA(1), getFilename(), getLine(), getColumn());
                            }
                        }
                }
                if (_returnToken == null) continue tryAgain; // found SKIP token
                _ttype = _returnToken.getType();
                _ttype = testLiteralsTable(_ttype);
                _returnToken.setType(_ttype);
                return _returnToken;
            } catch (RecognitionException e) {
                throw new TokenStreamRecognitionException(e);
            }
        } catch (CharStreamException cse) {
            if (cse instanceof CharStreamIOException) {
                throw new TokenStreamIOException(((CharStreamIOException) cse).io);
            } else {
                throw new TokenStreamException(cse.getMessage());
            }
        }
    }
}
// Generated ANTLR lexer entry point for a field/filter grammar: uses a second
// lookahead character to distinguish '==' (EEQUAL) from '=' (EQUAL) and a quoted
// STRING from a bare QUOTE, and falls back to a character-set test for FIELDTYPE.
// This variant has no literals table, so testLiteralsTable() is not called.
public Token nextToken() throws TokenStreamException {
    Token theRetToken = null;
    tryAgain:
    for (;;) {
        Token _token = null;
        int _ttype = Token.INVALID_TYPE;
        resetText();
        try { // for char stream error handling
            try { // for lexical error handling
                switch (LA(1)) {
                    case '\t': case ' ': {
                        mWS(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '(': {
                        mLPAREN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case ')': {
                        mRPAREN(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    case '!': {
                        mNEQUAL(true);
                        theRetToken = _returnToken;
                        break;
                    }
                    default:
                        if ((LA(1) == '=') && (LA(2) == '=')) {
                            mEEQUAL(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '"') && (LA(2) >= '\u0003' && LA(2) <= '\u00ff')) {
                            mSTRING(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '=') && (true)) {
                            mEQUAL(true);
                            theRetToken = _returnToken;
                        } else if ((LA(1) == '"') && (true)) {
                            mQUOTE(true);
                            theRetToken = _returnToken;
                        } else if (_tokenSet_0.member(LA(1))) {
                            mFIELDTYPE(true);
                            theRetToken = _returnToken;
                        } else {
                            if (LA(1) == EOF_CHAR) {
                                uponEOF();
                                _returnToken = makeToken(Token.EOF_TYPE);
                            } else {
                                throw new NoViableAltForCharException(
                                        LA(1), getFilename(), getLine(), getColumn());
                            }
                        }
                }
                if (_returnToken == null) continue tryAgain; // found SKIP token
                _ttype = _returnToken.getType();
                _returnToken.setType(_ttype);
                return _returnToken;
            } catch (RecognitionException e) {
                throw new TokenStreamRecognitionException(e);
            }
        } catch (CharStreamException cse) {
            if (cse instanceof CharStreamIOException) {
                throw new TokenStreamIOException(((CharStreamIOException) cse).io);
            } else {
                throw new TokenStreamException(cse.getMessage());
            }
        }
    }
}
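// Each of the nextToken() bodies above is the entry point of an ANTLR 2.x generated
// lexer; a caller normally pulls tokens in a loop until it sees the EOF token. The
// driver below is a minimal sketch of that usage: ExprLexer is a hypothetical class
// name standing in for whichever lexer class your grammar generates, and the input
// string is arbitrary sample text.

import java.io.StringReader;

import antlr.Token;
import antlr.TokenStreamException;

public class LexerDriver {
    public static void main(String[] args) throws TokenStreamException {
        // ExprLexer is a placeholder for the generated lexer class (assumption);
        // ANTLR 2.x lexers provide a constructor taking a java.io.Reader.
        ExprLexer lexer = new ExprLexer(new StringReader("x = 3 + 4 * y;"));

        // nextToken() returns one token per call and a Token.EOF_TYPE token at end of input.
        for (Token t = lexer.nextToken(); t.getType() != Token.EOF_TYPE; t = lexer.nextToken()) {
            System.out.println(t.getType() + "\t" + t.getText());
        }
    }
}

// Tokens whose rules call skip() (e.g. whitespace in most of the lexers above) never
// reach the caller: the generated loop sees a null _returnToken and continues instead
// of returning.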