public void match(int c) throws MismatchedTokenException { if (input.LA(1) != c) { if (state.backtracking > 0) { state.failed = true; return; } MismatchedTokenException mte = new MismatchedTokenException(c, input); recover(mte); // don't really recover; just consume in lexer throw mte; } input.consume(); state.failed = false; }
/**
 * Match one character whose code lies in the inclusive range {@code [a, b]}.
 *
 * <p>On success the character is consumed and {@code state.failed} is cleared. On failure while
 * backtracking, only {@code state.failed} is set; otherwise a {@link MismatchedRangeException}
 * is thrown after {@link #recover} consumes the offending char.
 *
 * @param a the low end of the range (inclusive)
 * @param b the high end of the range (inclusive)
 * @throws MismatchedRangeException if the lookahead falls outside {@code [a, b]}
 */
public void matchRange(int a, int b) throws MismatchedRangeException {
  int la = input.LA(1);
  if (a <= la && la <= b) {
    input.consume();
    state.failed = false;
    return;
  }
  // Out of range: flag failure when backtracking, throw otherwise.
  if (state.backtracking > 0) {
    state.failed = true;
    return;
  }
  MismatchedRangeException mre = new MismatchedRangeException(a, b, input);
  recover(mre);
  throw mre;
}
/**
 * Match the string {@code s} character by character against the input.
 *
 * <p>Each matched character is consumed and clears {@code state.failed}. The first mismatch
 * either flags failure (while backtracking) or throws, leaving already-matched characters
 * consumed.
 *
 * @param s the literal text to match
 * @throws MismatchedTokenException on the first character that does not match
 */
public void match(String s) throws MismatchedTokenException {
  for (int pos = 0; pos < s.length(); pos++) {
    char expected = s.charAt(pos);
    if (input.LA(1) != expected) {
      if (state.backtracking > 0) {
        state.failed = true;
        return;
      }
      MismatchedTokenException mte = new MismatchedTokenException(expected, input);
      recover(mte);
      throw mte;
    }
    input.consume();
    state.failed = false;
  }
}
/**
 * Return a token from this source; i.e., match a token on the char stream.
 *
 * <p>Loops until a non-skipped token is produced: resets per-token bookkeeping, returns a
 * synthesized EOF token at end of input, and otherwise invokes the generated {@code mTokens}
 * rule. Lexical errors are reported and recovered from (one char discarded) and the loop
 * retries.
 */
public Token nextToken() {
  while (true) {
    // Reset the per-token state before attempting a new match.
    state.token = null;
    state.channel = Token.DEFAULT_CHANNEL;
    state.tokenStartCharIndex = input.index();
    state.tokenStartCharPositionInLine = input.getCharPositionInLine();
    state.tokenStartLine = input.getLine();
    state.text = null;
    if (input.LA(1) == CharStream.EOF) {
      // Build an explicit EOF token anchored at the current input position.
      Token eof =
          new CommonToken(
              (CharStream) input,
              Token.EOF,
              Token.DEFAULT_CHANNEL,
              input.index(),
              input.index());
      eof.setLine(getLine());
      eof.setCharPositionInLine(getCharPositionInLine());
      return eof;
    }
    try {
      mTokens();
      if (state.token == null) {
        // Rule matched but never emitted; synthesize the token now.
        emit();
      } else if (state.token == Token.SKIP_TOKEN) {
        // Skipped token (e.g. whitespace/comment); go match the next one.
        continue;
      }
      return state.token;
    } catch (NoViableAltException nva) {
      reportError(nva);
      recover(nva); // throw out current char and try again
    } catch (RecognitionException re) {
      reportError(re); // match() routine has already called recover()
    }
  }
}
/**
 * Build a {@code Token} for the kind most recently matched by the DFA.
 *
 * <p>Uses the canonical literal image for the matched kind when one exists, falling back to the
 * raw text captured by the input stream, and stamps the token with the begin/end line and
 * column positions reported by the stream.
 *
 * @return a freshly constructed token carrying image and position information
 */
protected Token jjFillToken() {
  String literal = jjstrLiteralImages[jjmatchedKind];
  String image = (literal == null) ? input_stream.GetImage() : literal;
  // Capture positions in the same order the stream reports them.
  int beginLine = input_stream.getBeginLine();
  int beginColumn = input_stream.getBeginColumn();
  int endLine = input_stream.getEndLine();
  int endColumn = input_stream.getEndColumn();
  Token token = Token.newToken(jjmatchedKind, image);
  token.beginLine = beginLine;
  token.beginColumn = beginColumn;
  token.endLine = endLine;
  token.endColumn = endColumn;
  return token;
}
public void reset() { super.reset(); // reset all recognizer state variables // wack Lexer state variables if (input != null) { input.seek(0); // rewind the input } if (state == null) { return; // no shared state work to do } state.token = null; state.type = Token.INVALID_TOKEN_TYPE; state.channel = Token.DEFAULT_CHANNEL; state.tokenStartCharIndex = -1; state.tokenStartCharPositionInLine = -1; state.tokenStartLine = -1; state.text = null; }
/**
 * Emit rule-exit trace output, annotating it with the current lookahead character and the
 * line/column position in the input.
 *
 * @param ruleName name of the rule being exited
 * @param ruleIndex index of the rule being exited
 */
public void traceOut(String ruleName, int ruleIndex) {
  char lookahead = (char) input.LT(1);
  String position = "line=" + getLine() + ":" + getCharPositionInLine();
  super.traceOut(ruleName, ruleIndex, lookahead + " " + position);
}
/** * Lexers can normally match any char in it's vocabulary after matching a token, so do the easy * thing and just kill a character and hope it all works out. You can instead use the rule * invocation stack to do sophisticated error recovery if you are in a fragment rule. */ public void recover(RecognitionException re) { // System.out.println("consuming char "+(char)input.LA(1)+" during recovery"); // re.printStackTrace(); input.consume(); }
/**
 * Return the text matched so far for the current token, or any explicit text override.
 *
 * @return the override text if one was set, otherwise the input span from the token start up to
 *     (and including, per ANTLR's inclusive-end substring) the char before the current index
 */
public String getText() {
  String override = state.text;
  return (override != null)
      ? override
      : input.substring(state.tokenStartCharIndex, getCharIndex() - 1);
}
/**
 * What is the index of the current character of lookahead?
 *
 * @return the input stream's current character index
 */
public int getCharIndex() {
  return input.index();
}
/**
 * Report the column of the current lookahead character.
 *
 * @return the zero-based character position within the current line, as tracked by the input
 */
public int getCharPositionInLine() {
  return input.getCharPositionInLine();
}
/**
 * Report the line of the current lookahead character.
 *
 * @return the current line number, as tracked by the input stream
 */
public int getLine() {
  return input.getLine();
}
/** Match any single character: unconditionally consume one char from the input. */
public void matchAny() {
  input.consume();
}
/**
 * Name of the character source being lexed.
 *
 * @return whatever source name the underlying input stream reports
 */
public String getSourceName() {
  return input.getSourceName();
}
/**
 * Get the next Token.
 *
 * <p>JavaCC-generated token-manager loop: begins a token, runs the DFA for the current lexical
 * state, backs up over-read characters, and either returns a real token, silently continues on
 * a SKIP-style match, or throws a {@link TokenMgrError} describing the lexical error.
 */
public Token getNextToken() {
  Token matchedToken;
  int curPos = 0;

  EOFLoop:
  for (; ; ) {
    try {
      curChar = input_stream.BeginToken();
    } catch (java.io.IOException e) {
      // End of input: kind 0 is the generated EOF token kind.
      jjmatchedKind = 0;
      matchedToken = jjFillToken();
      return matchedToken;
    }

    // Dispatch to the DFA entry point for the current lexical state.
    switch (curLexState) {
      case 0:
        jjmatchedKind = 0x7fffffff; // sentinel meaning "no match yet"
        jjmatchedPos = 0;
        curPos = jjMoveStringLiteralDfa0_0();
        break;
      case 1:
        jjmatchedKind = 0x7fffffff;
        jjmatchedPos = 0;
        curPos = jjMoveStringLiteralDfa0_1();
        break;
      case 2:
        jjmatchedKind = 0x7fffffff;
        jjmatchedPos = 0;
        curPos = jjMoveStringLiteralDfa0_2();
        break;
      case 3:
        jjmatchedKind = 0x7fffffff;
        jjmatchedPos = 0;
        curPos = jjMoveStringLiteralDfa0_3();
        break;
    }

    if (jjmatchedKind != 0x7fffffff) {
      // Push back any characters read past the end of the longest match.
      if (jjmatchedPos + 1 < curPos) input_stream.backup(curPos - jjmatchedPos - 1);
      // Bit test against jjtoToken: is the matched kind a real (returned) token?
      if ((jjtoToken[jjmatchedKind >> 6] & (1L << (jjmatchedKind & 077))) != 0L) {
        matchedToken = jjFillToken();
        // Some token kinds trigger a lexical-state transition.
        if (jjnewLexState[jjmatchedKind] != -1) curLexState = jjnewLexState[jjmatchedKind];
        return matchedToken;
      } else {
        // SKIP/MORE-style kind: transition state if needed and keep scanning.
        if (jjnewLexState[jjmatchedKind] != -1) curLexState = jjnewLexState[jjmatchedKind];
        continue EOFLoop;
      }
    }

    // No DFA match at all: assemble position/context details for the error.
    int error_line = input_stream.getEndLine();
    int error_column = input_stream.getEndColumn();
    String error_after = null;
    boolean EOFSeen = false;
    try {
      // Probe one char ahead to distinguish "bad char" from "unexpected EOF".
      input_stream.readChar();
      input_stream.backup(1);
    } catch (java.io.IOException e1) {
      EOFSeen = true;
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
      // Adjust the reported position if the last char was a line terminator.
      if (curChar == '\n' || curChar == '\r') {
        error_line++;
        error_column = 0;
      } else error_column++;
    }
    if (!EOFSeen) {
      input_stream.backup(1);
      error_after = curPos <= 1 ? "" : input_stream.GetImage();
    }
    throw new TokenMgrError(
        EOFSeen,
        curLexState,
        error_line,
        error_column,
        error_after,
        curChar,
        TokenMgrError.LEXICAL_ERROR);
  }
}