/**
 * Parses a primary expression: a parenthesized sub-expression, an identifier
 * (plain variable or function call), or an integer literal.
 */
private Expression parsePrimaryExpression() {
    final TokenType type = myLexer.nextToken();
    switch (type) {
        case LPAREN: {
            Expression inner = parseExpression();
            tokenExpected(TokenType.RPAREN);
            return inner;
        }
        case IDENTIFIER: {
            String name = myLexer.getToken();
            // A '(' immediately after the identifier makes it a call; otherwise
            // the peeked token is returned to the stream and it is a variable.
            if (myLexer.nextToken() == TokenType.LPAREN) {
                List<Expression> arguments = parseExpressionList();
                tokenExpected(TokenType.RPAREN);
                return new FunctionCallExpression(name, arguments);
            }
            myLexer.pushBack();
            return new VariableExpression(name);
        }
        case INT: {
            try {
                int value = Integer.parseInt(myLexer.getToken());
                return new IntegerLiteralExpression(value);
            } catch (NumberFormatException e) {
                throw new ParsingException(myLexer.getToken() + " is not an integer");
            }
        }
        default:
            throw new ParsingException(
                "'(' or number or identifier expected but " + myLexer.getToken() + " found");
    }
}
/**
 * Creates a parser over the given lexer, registering the keyword set and
 * priming the one-token lookahead window.
 */
public Parser(Lexer lexer, String[] keywords) {
    this.lexer = lexer;
    for (int i = 0; i < keywords.length; i++) {
        this.keywords.add(keywords[i]);
    }
    token = lexer.nextToken();
    // When the very first token is already EOF there is nothing further to
    // read, so the EOF token doubles as the lookahead.
    lookAhead = (token.type == TokenType.EOF) ? token : lexer.nextToken();
}
/**
 * Parses a left-associative chain of '*' and '/' over primary expressions.
 */
private Expression parseMultExpression() {
    Expression result = parsePrimaryExpression();
    TokenType op = myLexer.nextToken();
    while (op == TokenType.MULT || op == TokenType.DIV) {
        Operations operation = (op == TokenType.MULT) ? Operations.MULT : Operations.DIV;
        result = new BinaryExpression(operation, result, parsePrimaryExpression());
        op = myLexer.nextToken();
    }
    // The token that ended the chain belongs to the caller.
    myLexer.pushBack();
    return result;
}
/**
 * Parses a left-associative chain of '+' and '-' over multiplicative expressions.
 */
private Expression parseAddExpression() {
    Expression result = parseMultExpression();
    TokenType op = myLexer.nextToken();
    while (op == TokenType.PLUS || op == TokenType.MINUS) {
        Operations operation = (op == TokenType.PLUS) ? Operations.ADD : Operations.SUB;
        result = new BinaryExpression(operation, result, parseMultExpression());
        op = myLexer.nextToken();
    }
    // The token that ended the chain belongs to the caller.
    myLexer.pushBack();
    return result;
}
/**
 * Parses a factor: either a parenthesized expression or a bare condition.
 * Leaves the next unconsumed token in {@code token}.
 */
private void factor() {
    token = lexer.nextToken();
    if (token != Lexer.LEFT) {
        // Not an opening parenthesis: the factor is a plain condition.
        condition();
        return;
    }
    expression();
    if (token != Lexer.RIGHT) {
        throw new MalformedExpressionException(
            String.format("')' instead of <%s> expected.", token));
    }
    token = lexer.nextToken();
}
/**
 * Implements the preprocessor token-pasting ('##') operator: concatenates the
 * character images of the two tokens and re-lexes the result. The paste is
 * valid only when the combined image scans as exactly one token; otherwise a
 * macro-pasting problem is reported against the given macro and null is
 * returned.
 */
private Token tokenpaste(Token arg1, Token arg2, PreprocessorMacro macro) {
    // A null operand means that macro argument expanded to nothing; the other
    // operand is the paste result unchanged.
    if (arg1 == null) {
        return arg2;
    }
    if (arg2 == null) {
        return arg1;
    }
    final char[] image1 = arg1.getCharImage();
    final char[] image2 = arg2.getCharImage();
    final int l1 = image1.length;
    final int l2 = image2.length;
    // Concatenate both images into one buffer for re-lexing.
    final char[] image = new char[l1 + l2];
    System.arraycopy(image1, 0, image, 0, l1);
    System.arraycopy(image2, 0, image, l1, l2);
    Lexer lex = new Lexer(image, fLexOptions, ILexerLog.NULL, null);
    try {
        Token t1 = lex.nextToken();
        Token t2 = lex.nextToken();
        // Valid paste: exactly one real token followed by end-of-input.
        if (t1.getType() != IToken.tEND_OF_INPUT && t2.getType() == IToken.tEND_OF_INPUT) {
            // Re-anchor the pasted token to the source span of both operands.
            t1.setOffset(arg1.getOffset(), arg2.getEndOffset());
            return t1;
        }
    } catch (OffsetLimitReachedException e) {
        // Intentionally ignored: treated like an invalid paste and reported below.
    }
    handleProblem(IProblem.PREPROCESSOR_MACRO_PASTING_ERROR, macro.getNameCharArray());
    return null;
}
/**
 * Lexer driver: tokenizes each file named on the command line and prints one
 * line per token (symbol name, position, and value for IDs/ints/strings).
 *
 * Fix: the input stream was leaked if the lexer threw while tokenizing; it is
 * now closed in a finally block.
 *
 * @param argv file names to tokenize
 * @throws java.io.IOException if a file cannot be opened or closed
 */
public static void main(String argv[]) throws java.io.IOException {
    for (int i = 0; i < argv.length; ++i) {
        String filename = argv[i];
        // Only label the output when processing more than one file.
        if (argv.length > 1) System.out.println("***Processing: " + filename);
        ErrorMsg.ErrorMsg errorMsg = new ErrorMsg.ErrorMsg(filename);
        java.io.InputStream inp = new java.io.FileInputStream(filename);
        try {
            Lexer lexer = new Yylex(inp, errorMsg);
            java_cup.runtime.Symbol tok;
            do {
                String extra = "";
                tok = lexer.nextToken();
                switch (tok.sym) {
                    case sym.ID:
                        extra = "\t$" + tok.value;
                        break;
                    case sym.INT:
                        extra = "\t#" + tok.value;
                        break;
                    case sym.STRING:
                        extra = " \"" + tok.value + "\"";
                        break;
                }
                System.out.println(symnames[tok.sym] + " " + tok.left + extra);
            } while (tok.sym != sym.EOF);
        } finally {
            // Ensure the stream is released even if tokenizing throws.
            inp.close();
        }
    }
}
/**
 * Shifts the token window one step: the current token becomes the last token,
 * the lookahead becomes current, and a fresh lookahead is read (EOF is sticky).
 * Reports lexer error tokens via {@code error}.
 */
protected void nextToken() {
    lastToken = token;
    token = lookAhead;
    if (token.type == TokenType.ERROR) {
        error(token.text);
    }
    // At EOF stop pulling from the lexer and let the EOF token repeat.
    lookAhead = (token.type == TokenType.EOF) ? token : lexer.nextToken();
}
/**
 * Tokenizes an input file and writes one token per line to an output file.
 * Default paths are used unless both an input and an output path are given
 * on the command line.
 *
 * Fix: the writer was leaked when a write failed mid-stream (close only ran
 * on the success path); it is now closed in a finally block.
 *
 * @param args optional: args[0] = input file, args[1] = output file
 */
public static void main(String[] args) {
    String inFile = "C:\\Users\\Sukoon\\Documents\\NetBeansProjects\\cdproj\\src\\Sample.in";
    String outFile = "C:\\Users\\Sukoon\\Desktop\\Sample.out";
    if (args.length > 1) {
        inFile = args[0];
        outFile = args[1];
    }
    Lexer lexer = new Lexer(inFile);
    try {
        BufferedWriter writer = new BufferedWriter(new FileWriter(outFile));
        try {
            // null from nextToken() signals end of input.
            Token t;
            while ((t = lexer.nextToken()) != null) {
                writer.write(t.toString());
                writer.newLine();
            }
        } finally {
            // Flush and release the output file even if a write throws.
            writer.close();
        }
        System.out.println("Done tokenizing file: " + inFile);
        System.out.println("Output written in file: " + outFile);
    } catch (IOException e) {
        e.printStackTrace();
    }
}
/**
 * Method for tracking macro expansions: re-runs macro expansion over the given
 * snippet and reports each step to the tracker. The snippet must start with
 * the name of a defined macro, otherwise the tracker is failed.
 *
 * @param beforeExpansion the source text to expand
 * @param tracker receives start/step/finish (or fail) notifications
 * @param filePath file the snippet originates from (for location reporting)
 * @param lineNumber line the snippet starts at
 * @param protectDefinedConstructs whether 'defined' constructs are shielded
 *     from expansion
 * @since 5.0
 */
public void expand(
    String beforeExpansion,
    MacroExpansionTracker tracker,
    String filePath,
    int lineNumber,
    boolean protectDefinedConstructs) {
    // Reset per-expansion bookkeeping and pin the scanner to the fixed input.
    fImplicitMacroExpansions.clear();
    fImageLocationInfos.clear();
    fFixedInput = beforeExpansion.toCharArray();
    fFixedCurrentFilename = filePath;
    fFixedLineNumber = lineNumber;
    Lexer lexer = new Lexer(fFixedInput, fLexOptions, fLog, this);
    try {
        tracker.start(lexer);
        // The input must begin with an identifier naming a known macro.
        Token identifier = lexer.nextToken();
        if (identifier.getType() != IToken.tIDENTIFIER) {
            tracker.fail();
            return;
        }
        PreprocessorMacro macro = fDictionary.get(identifier.getCharImage());
        if (macro == null) {
            tracker.fail();
            return;
        }
        // Advance past the macro name before assembling the token sources.
        lexer.nextToken();
        fStartOffset = identifier.getOffset();
        fEndOffset = identifier.getEndOffset();
        fCompletionMode = false;
        // Macros currently being expanded, to block recursive self-expansion.
        IdentityHashMap<PreprocessorMacro, PreprocessorMacro> forbidden =
            new IdentityHashMap<PreprocessorMacro, PreprocessorMacro>();
        // setup input sequence
        TokenSource input = new TokenSource(lexer);
        TokenList firstExpansion = new TokenList();
        // Boundary markers bracket the first expansion so nested expansions
        // can be attributed to this macro.
        firstExpansion.append(new ExpansionBoundary(macro, true));
        expandOne(identifier, macro, forbidden, input, firstExpansion, tracker);
        firstExpansion.append(new ExpansionBoundary(macro, false));
        input.prepend(firstExpansion);
        TokenList result = expandAll(input, forbidden, protectDefinedConstructs, tracker);
        tracker.finish(result, fEndOffset);
    } catch (OffsetLimitReachedException e) {
        // Intentionally ignored: tracking simply stops at the offset limit.
    }
}
/**
 * Consumes the current token if it is an identifier equal to the given text;
 * otherwise records the error position and throws.
 */
protected void acceptIdentifier(String text) {
    if (!identifierEquals(text)) {
        setErrorEndPos(lexer.pos());
        throw new ParserException("syntax error, expect " + text + ", actual " + lexer.token());
    }
    lexer.nextToken();
}
/**
 * Reads the next token and verifies both its type and, when {@code text} is
 * non-null, its exact text; throws a ParsingException on mismatch.
 */
private void tokenExpected(TokenType type, String text) {
    myLexer.nextToken();
    if (myLexer.getTokenType() != type) {
        throw new ParsingException(type.getName() + " expected but " + myLexer.getToken() + " found");
    }
    // null text means "any token of this type is acceptable".
    boolean textMatches = (text == null) || text.equals(myLexer.getToken());
    if (!textMatches) {
        throw new ParsingException(text + " expected but " + myLexer.getToken() + " found");
    }
}
/**
 * Parses a comma-separated list of one or more expressions; the token that
 * terminated the list is pushed back for the caller.
 */
private List<Expression> parseExpressionList() {
    List<Expression> expressions = new ArrayList<Expression>();
    while (true) {
        expressions.add(parseExpression());
        if (myLexer.nextToken() != TokenType.COMMA) {
            break;
        }
    }
    myLexer.pushBack();
    return expressions;
}
/**
 * Main function. Takes OQL query string as command line parameter and prints token stream version
 * of that query to stdout.
 *
 * Fixes: uses the parameterized {@code Hashtable<Integer, String>} instead of
 * the raw type, replaces the deprecated {@code new Integer(int)} constructor
 * with {@code Integer.valueOf(int)}, and guards against a missing argument
 * instead of failing with ArrayIndexOutOfBoundsException.
 *
 * @param args Pass an OQL query string on the command line.
 */
public static void main(String args[]) {
    // Map each numeric token-type constant to a printable name.
    Hashtable<Integer, String> tokenTypes = new Hashtable<Integer, String>();
    tokenTypes.put(Integer.valueOf(END_OF_QUERY), "END_OF_QUERY");
    tokenTypes.put(Integer.valueOf(KEYWORD_SELECT), "KEYWORD_SELECT");
    tokenTypes.put(Integer.valueOf(IDENTIFIER), "IDENTIFIER");
    tokenTypes.put(Integer.valueOf(KEYWORD_AS), "KEYWORD_AS");
    tokenTypes.put(Integer.valueOf(COLON), "COLON");
    tokenTypes.put(Integer.valueOf(KEYWORD_FROM), "KEYWORD_FROM");
    tokenTypes.put(Integer.valueOf(KEYWORD_IN), "KEYWORD_IN");
    tokenTypes.put(Integer.valueOf(KEYWORD_WHERE), "KEYWORD_WHERE");
    tokenTypes.put(Integer.valueOf(KEYWORD_OR), "KEYWORD_OR");
    tokenTypes.put(Integer.valueOf(KEYWORD_AND), "KEYWORD_AND");
    tokenTypes.put(Integer.valueOf(EQUAL), "EQUAL");
    tokenTypes.put(Integer.valueOf(NOT_EQUAL), "NOT_EQUAL");
    tokenTypes.put(Integer.valueOf(KEYWORD_LIKE), "KEYWORD_LIKE");
    tokenTypes.put(Integer.valueOf(LT), "LT");
    tokenTypes.put(Integer.valueOf(LTE), "LTE");
    tokenTypes.put(Integer.valueOf(GT), "GT");
    tokenTypes.put(Integer.valueOf(GTE), "GTE");
    tokenTypes.put(Integer.valueOf(PLUS), "PLUS");
    tokenTypes.put(Integer.valueOf(MINUS), "MINUS");
    tokenTypes.put(Integer.valueOf(CONCAT), "CONCAT");
    tokenTypes.put(Integer.valueOf(TIMES), "TIMES");
    tokenTypes.put(Integer.valueOf(DIVIDE), "DIVIDE");
    tokenTypes.put(Integer.valueOf(KEYWORD_MOD), "KEYWORD_MOD");
    tokenTypes.put(Integer.valueOf(KEYWORD_ABS), "KEYWORD_ABS");
    tokenTypes.put(Integer.valueOf(KEYWORD_NOT), "KEYWORD_NOT");
    tokenTypes.put(Integer.valueOf(LPAREN), "LPAREN");
    tokenTypes.put(Integer.valueOf(RPAREN), "RPAREN");
    tokenTypes.put(Integer.valueOf(DOLLAR), "DOLLAR");
    tokenTypes.put(Integer.valueOf(KEYWORD_NIL), "KEYWORD_NIL");
    tokenTypes.put(Integer.valueOf(KEYWORD_UNDEFINED), "KEYWORD_UNDEFINED");
    tokenTypes.put(Integer.valueOf(BOOLEAN_LITERAL), "BOOLEAN_LITERAL");
    tokenTypes.put(Integer.valueOf(LONG_LITERAL), "LONG_LITERAL");
    tokenTypes.put(Integer.valueOf(DOUBLE_LITERAL), "DOUBLE_LITERAL");
    tokenTypes.put(Integer.valueOf(CHAR_LITERAL), "CHAR_LITERAL");
    tokenTypes.put(Integer.valueOf(STRING_LITERAL), "STRING_LITERAL");
    tokenTypes.put(Integer.valueOf(DATE_LITERAL), "DATE_LITERAL");
    tokenTypes.put(Integer.valueOf(TIME_LITERAL), "TIME_LITERAL");
    tokenTypes.put(Integer.valueOf(TIMESTAMP_LITERAL), "TIMESTAMP_LITERAL");
    tokenTypes.put(Integer.valueOf(KEYWORD_BETWEEN), "KEYWORD_BETWEEN");
    // Guard: an OQL query string is required as the first argument.
    if (args.length < 1) {
        System.out.println("Usage: pass an OQL query string as the first command line argument.");
        return;
    }
    Lexer lexer = new Lexer(args[0]);
    while (lexer.hasMoreTokens()) {
        try {
            Token theToken = lexer.nextToken();
            String tokenType = tokenTypes.get(Integer.valueOf(theToken.getTokenType()));
            System.out.println(tokenType + " : " + theToken.getTokenValue());
        } catch (Exception e) {
            System.out.println(e.toString());
            e.printStackTrace(System.out);
            break;
        }
    }
}
/**
 * Parses a comparison of the form {@code expr OP expr}, where OP is one of
 * the operators registered in BOOLEAN_OPERATIONS.
 */
private BooleanExpression parseBooleanExpression() {
    Expression left = parseExpression();
    ComparisonOperation operation = BOOLEAN_OPERATIONS.get(myLexer.nextToken());
    // An unregistered token type means the operator position held something else.
    if (operation == null) {
        throw new ParsingException(
            "'<', '>', '<=', '>=', '!=' or '==' expected but " + myLexer.getToken() + " found");
    }
    Expression right = parseExpression();
    return new ComparisonExpression(operation, left, right);
}
/**
 * Parses an if statement: '(' condition ')' statement, optionally followed by
 * an 'else' branch. The 'if' keyword itself has already been consumed.
 */
private Statement parseIf() {
    tokenExpected(TokenType.LPAREN);
    BooleanExpression condition = parseBooleanExpression();
    tokenExpected(TokenType.RPAREN);
    Statement thenClause = parseStatement();
    Statement elseClause = null;
    // Peek one token: only the exact identifier "else" starts an else branch.
    boolean hasElse =
        myLexer.nextToken() == TokenType.IDENTIFIER && myLexer.getToken().equals("else");
    if (hasElse) {
        elseClause = parseStatement();
    } else {
        myLexer.pushBack();
    }
    return new IfStatement(condition, thenClause, elseClause);
}
/**
 * Parses a sequence of statements until EOF, or until '}' when parsing the
 * body of a braced group. The terminating token is left for the caller.
 */
private StatementsNode parseStatements(boolean insideGroup) {
    List<Statement> statements = new ArrayList<Statement>();
    while (true) {
        statements.add(parseStatement());
        // Peek at the next token without consuming it.
        TokenType next = myLexer.nextToken();
        myLexer.pushBack();
        boolean atEnd = next == TokenType.EOF || (insideGroup && next == TokenType.RBRACE);
        if (atEnd) {
            break;
        }
    }
    return new StatementsNode(statements);
}
/**
 * Parses a single statement: a keyword statement (print/return/fun/if/while),
 * an assignment, a braced statement group, or a bare expression statement.
 */
private Statement parseStatement() {
    TokenType type = myLexer.nextToken();
    if (type == TokenType.IDENTIFIER) {
        String token = myLexer.getToken();
        if (token.equals("print")) {
            Expression expression = parseExpression();
            tokenExpected(TokenType.SEMICOLON);
            return new PrintStatement(expression);
        } else if (token.equals("return")) {
            Expression expression = parseExpression();
            tokenExpected(TokenType.SEMICOLON);
            return new ReturnStatement(expression);
        } else if (token.equals("fun")) {
            return parseFunctionDeclaration();
        } else if (token.equals("if")) {
            return parseIf();
        } else if (token.equals("while")) {
            return parseWhile();
        }
        // Not a keyword: an identifier followed by '=' is an assignment.
        if (myLexer.nextToken() == TokenType.ASSIGN) {
            Expression expression = parseExpression();
            tokenExpected(TokenType.SEMICOLON);
            return new AssignmentStatement(token, expression);
        }
        // Not an assignment either: undo both reads — first push back the
        // token after the identifier, then re-queue the identifier itself so
        // the expression parse below re-reads it from the start.
        myLexer.pushBack();
        myLexer.pushBack(token, TokenType.IDENTIFIER);
    } else if (type == TokenType.LBRACE) {
        StatementsNode statement = parseStatements(true);
        tokenExpected(TokenType.RBRACE);
        return statement;
    } else {
        // Leave the token for the expression parser.
        myLexer.pushBack();
    }
    // Fallback: a bare expression terminated by ';'.
    Expression expression = parseExpression();
    tokenExpected(TokenType.SEMICOLON);
    return new ExpressionStatement(expression);
}
/**
 * Parses a terminal value (variable or number) into {@code root}, validating
 * variables against the allowed-identifier set when one is configured.
 * Advances {@code token} past the consumed value.
 */
private void value() {
    boolean isValue = token == Lexer.VARIABLE || token == Lexer.NUMBER;
    if (!isValue) {
        throw new MalformedExpressionException(
            String.format("Value instead of <%s> expected.", token));
    }
    root = NodeFactory.createTerminal(token, lexer.getValue());
    // Variables are checked against the whitelist only when one is set.
    if (token == Lexer.VARIABLE && allowedIdentifiers != null
            && !allowedIdentifiers.contains(root.getSymbol())) {
        throw new MalformedExpressionException(
            String.format("Unknown identifier '%s'", root.getSymbol()));
    }
    token = lexer.nextToken();
}
/**
 * Consumes the current token if it matches the expected one; otherwise records
 * the error position and throws with the actual token, its text and position.
 */
public void accept(Token token) {
    if (lexer.token() != token) {
        setErrorEndPos(lexer.pos());
        throw new ParserException(
            "syntax error, expect "
                + token
                + ", actual "
                + lexer.token()
                + " "
                + lexer.stringVal()
                + ", pos "
                + this.lexer.pos());
    }
    lexer.nextToken();
}
/**
 * Drives the SQL lexer over a sample statement and prints each token, with the
 * identifier/literal text where applicable, until EOF.
 */
public void test_lexer() throws Exception {
    String sql = "SELECT * FROM T WHERE F1 = ? ORDER BY F2";
    Lexer lexer = new Lexer(sql);
    while (true) {
        lexer.nextToken();
        Token tok = lexer.token();
        if (tok == Token.IDENTIFIER) {
            System.out.println(tok.name() + "\t\t" + lexer.stringVal());
        } else if (tok == Token.LITERAL_INT) {
            System.out.println(tok.name() + "\t\t" + lexer.numberString());
        } else {
            System.out.println(tok.name() + "\t\t\t" + tok.name);
        }
        if (tok == Token.EOF) {
            break;
        }
    }
}
/**
 * Parses a binary comparison {@code value OP value} into {@code root}, where
 * OP is one of the six relational operators.
 */
@SuppressWarnings("unchecked")
private void condition() {
    value();
    boolean isComparison =
        token == Lexer.GREATER
            || token == Lexer.GREATEROREQUAL
            || token == Lexer.LESS
            || token == Lexer.LESSOREQUAL
            || token == Lexer.EQUAL
            || token == Lexer.NOTEQUAL;
    if (!isComparison) {
        throw new MalformedExpressionException(
            String.format("Conditional operator instead of <%s> expected.", token));
    }
    NonTerminal condition = NodeFactory.createNonTerminal(token);
    // The value just parsed is the left operand; parse the right one next.
    condition.setLeft(root);
    token = lexer.nextToken();
    value();
    condition.setRight(root);
    root = condition;
}
/**
 * Parses an optional alias after an expression or table source. Handles the
 * explicit 'AS alias' form (quoted, identifier, char literal, a large set of
 * keyword-like names, or '?'), dotted alias continuations, and the implicit
 * alias form without AS. Returns the alias text (quoted as in the source) or
 * null when none is present.
 */
protected String as() {
    String alias = null;
    if (lexer.token() == Token.AS) {
        lexer.nextToken();
        if (lexer.token() == Token.LITERAL_ALIAS) {
            // Double-quoted alias: keep the quotes in the returned text.
            alias = '"' + lexer.stringVal() + '"';
            lexer.nextToken();
        } else if (lexer.token() == Token.IDENTIFIER) {
            alias = lexer.stringVal();
            lexer.nextToken();
        } else if (lexer.token() == Token.LITERAL_CHARS) {
            // Single-quoted alias: keep the quotes in the returned text.
            alias = "'" + lexer.stringVal() + "'";
            lexer.nextToken();
        } else {
            // Many keywords are legal as aliases after AS; accept them verbatim.
            switch (lexer.token()) {
                case KEY:
                case INDEX:
                case CASE:
                case MODEL:
                case PCTFREE:
                case INITRANS:
                case MAXTRANS:
                case SEGMENT:
                case CREATION:
                case IMMEDIATE:
                case DEFERRED:
                case STORAGE:
                case NEXT:
                case MINEXTENTS:
                case MAXEXTENTS:
                case MAXSIZE:
                case PCTINCREASE:
                case FLASH_CACHE:
                case CELL_FLASH_CACHE:
                case KEEP:
                case NONE:
                case LOB:
                case STORE:
                case ROW:
                case CHUNK:
                case CACHE:
                case NOCACHE:
                case LOGGING:
                case NOCOMPRESS:
                case KEEP_DUPLICATES:
                case EXCEPTIONS:
                case PURGE:
                case INITIALLY:
                case END:
                case COMMENT:
                case ENABLE:
                case DISABLE:
                case SEQUENCE:
                case USER:
                case ANALYZE:
                case OPTIMIZE:
                case GRANT:
                    alias = lexer.stringVal();
                    lexer.nextToken();
                    return alias;
                case QUES:
                    alias = "?";
                    lexer.nextToken();
                    // NOTE(review): no return/break here — falls through to
                    // default and exits the switch; the '?' alias is then
                    // returned via the alias != null path below. Confirm the
                    // fall-through is intentional.
                default:
                    break;
            }
        }
        if (alias != null) {
            // Consume dotted continuations, appending '.' + token name each time.
            while (lexer.token() == Token.DOT) {
                lexer.nextToken();
                alias += ('.' + lexer.token().name());
                lexer.nextToken();
            }
            return alias;
        }
        // 'AS (' means no alias at this position (e.g. a subquery follows).
        if (lexer.token() == Token.LPAREN) {
            return null;
        }
        throw new ParserException("Error : " + lexer.token());
    }
    // Implicit alias (no AS keyword).
    if (lexer.token() == Token.LITERAL_ALIAS) {
        alias = '"' + lexer.stringVal() + '"';
        lexer.nextToken();
    } else if (lexer.token() == Token.IDENTIFIER) {
        alias = lexer.stringVal();
        lexer.nextToken();
    } else if (lexer.token() == Token.LITERAL_CHARS) {
        alias = "'" + lexer.stringVal() + "'";
        lexer.nextToken();
    } else if (lexer.token() == Token.CASE) {
        alias = lexer.token.name();
        lexer.nextToken();
    }
    // KEY may also act as a bare implicit alias.
    switch (lexer.token()) {
        case KEY:
            alias = lexer.token().name();
            lexer.nextToken();
            return alias;
        default:
            break;
    }
    return alias;
}
/**
 * Parses a function declaration: name '(' [param {',' param}] ')' body.
 * The 'fun' keyword has already been consumed by the caller.
 *
 * Fix: the original do-while required at least one parameter, so a
 * zero-argument declaration like {@code fun f() ...} failed with
 * "IDENTIFIER expected but ) found". The parameter list is now parsed only
 * when the token after '(' is not ')'.
 */
private Statement parseFunctionDeclaration() {
    tokenExpected(TokenType.IDENTIFIER);
    String name = myLexer.getToken();
    tokenExpected(TokenType.LPAREN);
    List<String> parameters = new ArrayList<String>();
    // Peek: an immediate ')' means an empty parameter list.
    TokenType next = myLexer.nextToken();
    myLexer.pushBack();
    if (next != TokenType.RPAREN) {
        do {
            tokenExpected(TokenType.IDENTIFIER);
            parameters.add(myLexer.getToken());
        } while (myLexer.nextToken() == TokenType.COMMA);
        // The non-comma token that ended the list is the expected ')'.
        myLexer.pushBack();
    }
    tokenExpected(TokenType.RPAREN);
    Statement body = parseStatement();
    return new FunctionDeclaration(name, parameters, body);
}