/**
 * Parses a Fel expression string into an AST.
 *
 * @param exp the expression source; {@code null} or empty yields {@code null}
 * @return the root {@link FelNode}, or {@code null} if parsing produced no usable tree
 * @throws ParseException if the input cannot be read or recognized
 */
public FelNode parse(String exp) {
    if (exp == null || exp.isEmpty()) {
        return null;
    }
    // Bug fix: the no-arg getBytes() encodes with the platform default charset
    // while ANTLRInputStream decodes with its own default — on a non-UTF-8
    // platform non-ASCII expressions could be corrupted. Pin both to UTF-8.
    ByteArrayInputStream is =
            new ByteArrayInputStream(exp.getBytes(java.nio.charset.StandardCharsets.UTF_8));
    ANTLRInputStream input;
    try {
        input = new ANTLRInputStream(is, "UTF-8");
    } catch (IOException e) {
        throw new ParseException(FelException.getCauseMessage(e), e);
    }
    FelLexer lexer = new FelLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    FelParser parser = new FelParser(tokens);
    parser.setTreeAdaptor(adaptor);
    ParserRuleReturnScope r;
    try {
        r = parser.program();
    } catch (RecognitionException e) {
        throw new ParseException(e.getMessage(), e);
    }
    if (r != null) {
        Object tree = r.getTree();
        // Only a FelNode root is meaningful; anything else is discarded.
        if (tree instanceof FelNode) {
            initFun((FelNode) tree);
            return (FelNode) tree;
        }
    }
    return null;
}
@Override public void run() { for (File file : DirList.listDirectory(m_inputDir)) { PBFile window = null; try { window = WindowFactory.getInstance().createWindow(file); } catch (IOException e) { e.printStackTrace(); } catch (RecognitionException e) { e.printStackTrace(); } if (window != null) buffer.add(window); long now = QDateTime.currentMSecsSinceEpoch(); if (now - lastEmitted > 200 && !buffer.isEmpty()) { // push the queue every 200ms newDataVectorReady.emit(buffer); buffer = new Vector<PBFile>(); lastEmitted = now; } } if (!buffer.isEmpty()) { newDataVectorReady.emit(buffer); } }
/**
 * Test method for {@link
 * org.drools.rule.builder.dialect.java.JavaExprAnalyzer#analyzeBlock(java.lang.String,
 * java.util.Set[])}.
 */
@Test
public void testAnalyzeBlock() {
    JavaExprAnalyzer analyzer = new JavaExprAnalyzer();
    // Only top-level declarations should be reported; variables declared in
    // nested blocks (the for-loop body and the bare braces) must be skipped.
    String codeBlock =
            "int x;\n"
                    + "Cheese cheese = new Cheese();\n"
                    + "for( Iterator it = list.iterator(); it.hasNext(); ) {\n"
                    + " int shouldNotBeIncluded = 1;\n"
                    + "}\n"
                    + "{\n"
                    + " String anotherNonTopLevelVar = \"test\";\n"
                    + "}\n"
                    + "double thisIsAGoodVar = 0;\n"
                    + "method();\n";
    try {
        JavaAnalysisResult analysis =
                analyzer.analyzeBlock(
                        codeBlock,
                        new BoundIdentifiers(
                                new HashMap<String, Class<?>>(), new HashMap<String, Class<?>>()));
        Set<String> localVars = analysis.getLocalVariables();
        assertEquals(3, localVars.size());
        assertTrue(localVars.contains("x"));
        assertTrue(localVars.contains("cheese"));
        assertTrue(localVars.contains("thisIsAGoodVar"));
    } catch (RecognitionException e) {
        e.printStackTrace();
        fail("Not supposed to raise exception: " + e.getMessage());
    }
}
public void testErrors(String[] pairs, boolean printTree) { for (int i = 0; i < pairs.length; i += 2) { String input = pairs[i]; String expect = pairs[i + 1]; ErrorQueue equeue = new ErrorQueue(); Grammar g = null; try { String[] lines = input.split("\n"); String fileName = getFilenameFromFirstLineOfGrammar(lines[0]); g = new Grammar(fileName, input, equeue); } catch (org.antlr.runtime.RecognitionException re) { re.printStackTrace(System.err); } String actual = equeue.toString(g.tool); System.err.println(actual); String msg = input; msg = msg.replaceAll("\n", "\\\\n"); msg = msg.replaceAll("\r", "\\\\r"); msg = msg.replaceAll("\t", "\\\\t"); // ignore error number expect = stripErrorNum(expect); actual = stripErrorNum(actual); assertEquals("error in: " + msg, expect, actual); } }
/**
 * Verifies that garbage input raises a RecognitionException; any other
 * exception (or no exception) fails the test.
 */
@Test
public void testTheKitty() {
    try {
        runTestWithString("sduhfdkufdhkjs;");
        // Fixed typo in the failure message ("thrownb" -> "thrown").
        fail("Should have thrown an exception");
    } catch (RecognitionException e) {
        // Expected path: the nonsense input is rejected by the parser.
        System.out.println("Expected " + e.toString());
    } catch (Exception e) {
        System.out.println("UnExpected " + e.toString());
        fail(e.getMessage());
    }
}
/**
 * Compiles BASIC source text into VM code and writes the result into
 * {@code data}.
 *
 * @param str  the source program text
 * @param data receives the generated byte-code via {@code CreateData}
 * @return {@code true} when compilation produced no errors
 */
public boolean compile(String str, VMData data) {
    file = str;
    // FIXME:
    int result = 0;
    // ANTLRStringStream
    ANTLRStringStream input = new ANTLRStringStream(str);
    StackBasicLexer lexer = new StackBasicLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    StackBasicParser parser = new StackBasicParser(tokens);
    // Share data here — the parser calls back into this driver while parsing,
    // so the reference is cleared again in the finally block below.
    parser.driver = this;
    try {
        parser.compilationUnit();
    } catch (RecognitionException e) {
        e.printStackTrace();
        result = -1; // remember the failure; reported after cleanup
    } finally {
        parser.driver = null;
    }
    if (result != 0) {
        return false;
    }
    // Any state left on the stack means a block construct was never closed;
    // report each one and (for FOR) drop its loop bookkeeping references.
    while (!state_stack.empty()) {
        State state = state_stack.peek();
        switch (state.getState()) {
            case State.STATE_IF:
                error("if does not match endif");
                break;
            case State.STATE_FOR:
                error("for does not match next");
                state.setStart(null);
                state.setEnd(null);
                state.setStep(null);
                break;
            case State.STATE_WHILE:
                error("while does not match wend");
                break;
        }
        state_stack.pop();
    }
    // FIXME:
    // Ensure the program ends with a HALT opcode.
    VMCode code = statement.get(statement.size() - 1);
    if (code.getOp() != VMCode.VM_HALT) {
        OpHalt();
    }
    // Resolve label addresses; returns the final code size.
    int code_size = LabelSetting();
    // FIXME: CreateData
    // from statements to data (dump to byte array)
    CreateData(data, code_size);
    return error_count == 0;
}
public static void main(String args[]) throws Exception {
    // Lex the sample input file (UTF-8) and run the top-level program rule.
    exprLexer lexer =
            new exprLexer(
                    new ANTLRFileStream("/Users/dannluciano/Sources/MyLanguage/input.txt", "UTF8"));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    exprParser parser = new exprParser(tokenStream, 49100, null);
    try {
        parser.program();
    } catch (RecognitionException e) {
        e.printStackTrace();
    }
}
@Test
public void testBaseUid() {
    // A bare identifier should parse into MetaData carrying that exact id.
    CharStream input = new ANTLRStringStream("Tellurium");
    UdlParser parser = new UdlParser(new CommonTokenStream(new UdlLexer(input)));
    try {
        MetaData data = parser.uid();
        assertNotNull(data);
        assertEquals("Tellurium", data.getId());
    } catch (RecognitionException e) {
        fail(e.getMessage());
    }
}
public static void parse(String filename) throws Exception {
    // Run the .alpha grammar over the file and capture the parsed results
    // into the static actions/alphas holders.
    DotAlphaLexer lexer = new DotAlphaLexer(new ANTLRFileStream(filename));
    DotAlphaParser parser = new DotAlphaParser(new CommonTokenStream(lexer));
    try {
        parser.dotAlpha();
    } catch (RecognitionException e) {
        e.printStackTrace();
    }
    actions = parser.getActions();
    alphas = parser.getAlphas();
}
@Override public Object call(ExecutionContext context, Object self, Object... args) { // 15.3.2.1 int numArgs = args.length; String body = ""; if (numArgs > 0) { body = Types.toString(context, args[numArgs - 1]); } StringBuffer formalParams = new StringBuffer(); boolean first = true; Set<String> seenParams = new HashSet<>(); boolean duplicateFormalParams = false; for (int i = 0; i < numArgs - 1; ++i) { if (!first) { formalParams.append(","); } String param = Types.toString(context, args[i]); if (seenParams.contains(param)) { duplicateFormalParams = true; } seenParams.add(param); formalParams.append(param); first = false; } StringBuffer code = new StringBuffer(); code.append("function(" + formalParams.toString() + "){\n"); code.append(body); code.append("}"); try { FunctionDescriptor descriptor = parseFunction(context, code.toString()); JSCompiler compiler = context.getGlobalObject().getCompiler(); JSFunction function = compiler.compileFunction( context, descriptor.getFormalParameters(), descriptor.getBlock(), false); if (function.isStrict() && duplicateFormalParams) { throw new ThrowException( context, context.createSyntaxError("duplicate formal parameters in function definition")); } function.setPrototype(getPrototype()); return function; } catch (RecognitionException e) { throw new ThrowException(context, context.createSyntaxError(e.getMessage())); } }
public static void main(String args[]) throws Exception {
    // Lex the sample input file (UTF-8) and run the top-level program rule.
    ChronosLexer lexer =
            new ChronosLexer(
                    new ANTLRFileStream(
                            "/Users/shannonlee/PLT_Team20/SHANNONTEST/__Test___input.txt", "UTF8"));
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    ChronosParser parser = new ChronosParser(tokenStream, 49100, null);
    try {
        parser.program();
    } catch (RecognitionException e) {
        e.printStackTrace();
    }
}
private static CommonTree runParser(CharStream charStream) throws Exception {
    // Parse a filter expression into its AST; on a recognition error the
    // stack trace is printed and null is returned instead of a tree.
    FilterParser parser = new FilterParser(new CommonTokenStream(new FilterLexer(charStream)));
    try {
        filter_return parseResult = parser.filter();
        return (CommonTree) parseResult.getTree();
    } catch (RecognitionException e) {
        e.printStackTrace();
        return null;
    }
}
/** Load template stream into this group */
public CompiledST loadTemplateFile(String prefix, String fileName, CharStream templateStream) {
    GroupLexer lexer = new GroupLexer(templateStream);
    GroupParser parser = new GroupParser(new CommonTokenStream(lexer));
    // Both lexer and parser report back into this group while running.
    lexer.group = this;
    parser.group = this;
    try {
        parser.templateDef(prefix);
    } catch (RecognitionException re) {
        errMgr.groupSyntaxError(ErrorType.SYNTAX_ERROR, fileName, re, re.getMessage());
    }
    // Derive the template name from the file name, qualified by the prefix
    // (e.g. "sub/" + "t") when one was supplied.
    String templateName = Misc.getFileNameNoSuffix(fileName);
    if (prefix != null && prefix.length() > 0) {
        templateName = prefix + "/" + templateName;
    }
    return rawGetTemplate(templateName);
}
protected Atom stringToAtom(String a) {
    // Build an LTL atom parser over the literal and delegate to its atom rule.
    ANTLRStringStream stream = new ANTLRStringStream(a);
    JVM vm = JVM.getVM();
    LTLSpec_SymbolicAtom parser =
            new LTLSpec_SymbolicAtom(
                    new CommonTokenStream(new LTLSpecLexer(stream)),
                    SymbolicLTLListener.invokedMethodName(vm),
                    true);
    try {
        return parser.atom();
    } catch (RecognitionException e) {
        e.printStackTrace();
        assert false : "Can not get atom from literal " + a;
    }
    // Only reached when assertions are disabled and parsing failed.
    return null;
}
/**
 * Formats a recognition error, delegating to the walker-aware overload when a
 * tree walker is available and falling back to the exception's own text otherwise.
 */
public String getErrorMessage(RecognitionException e) {
    return walker == null ? e.toString() : getErrorMessage(walker, e);
}
/**
 * Translate antlr internal exceptions to sane flume data flow configuration specific messages.
 */
@Override
public String getMessage() {
    if (re instanceof NoViableAltException) {
        NoViableAltException nvae = (NoViableAltException) re;
        // Escape the offending character so control characters stay readable.
        String escaped = StringEscapeUtils.escapeJava("" + (char) nvae.c);
        return "Lexer error at char '"
                + escaped
                + "' at line "
                + nvae.line
                + " char "
                + nvae.charPositionInLine;
    }
    if (re instanceof MismatchedTokenException) {
        MismatchedTokenException mte = (MismatchedTokenException) re;
        String token = (mte.token != null) ? mte.token.getText() : "\"\"";
        return "Parser error: unexpected '"
                + token
                + "' at position "
                + mte.charPositionInLine
                + " line "
                + mte.line
                + ": '"
                + mte.input
                + "'";
    }
    // Fallback for recognition errors we have no tailored wording for.
    return "Unknown RecognitionException: " + re.getMessage();
}
// xxx: Wrong! Should use TreeGrammer and not to populate customizer with custom nodes // Should be rewritten but I have no time for this public List<JavaVMOption<?>> parse() { Set<JavaVMOption<?>> result = new HashSet<JavaVMOption<?>>(); try { vmOptions_return options_return = vmOptions(); CommonTree root = options_return.tree; if (root instanceof JavaVMOption<?>) { result.add((JavaVMOption<?>) root); } else if (root != null) { result.addAll(root.getChildren()); } } catch (RecognitionException e) { e.printStackTrace(); } result.addAll(getAllOptions()); return new LinkedList<JavaVMOption<?>>(result); }
@Test
public void testTableHeaderUidNoId() {
    // No explicit id in "{header: 3}", so one is synthesized from the index.
    CharStream input = new ANTLRStringStream("{header: 3}");
    UdlParser parser = new UdlParser(new CommonTokenStream(new UdlLexer(input)));
    try {
        MetaData data = parser.uid();
        assertNotNull(data);
        assertTrue(data instanceof TableHeaderMetaData);
        TableHeaderMetaData header = (TableHeaderMetaData) data;
        assertEquals("_3", header.getId());
        assertEquals("3", header.getIndex().getValue());
        assertEquals(IndexType.VAL, header.getIndex().getType());
    } catch (RecognitionException e) {
        fail(e.getMessage());
    }
}
private FacebookObject fetchData(URL url) {
    // Stream the Graph API response straight into the ANTLR front end;
    // failures are logged and null is returned.
    FacebookObject result = null;
    try {
        FacebookGraphLexer lexer = new FacebookGraphLexer(new ANTLRInputStream(url.openStream()));
        FacebookGraphParser parser = new FacebookGraphParser(new CommonTokenStream(lexer));
        result = parser.start();
    } catch (IOException e) {
        e.printStackTrace();
    } catch (RecognitionException e) {
        e.printStackTrace();
    }
    return result;
}
@Test
public void testListUidNoId() {
    // No explicit id in "{10}", so "_10" is synthesized from the index.
    CharStream input = new ANTLRStringStream("{10}");
    UdlParser parser = new UdlParser(new CommonTokenStream(new UdlLexer(input)));
    try {
        MetaData data = parser.uid();
        assertNotNull(data);
        assertEquals("_10", data.getId());
        assertTrue(data instanceof ListMetaData);
        ListMetaData list = (ListMetaData) data;
        assertEquals("10", list.getIndex().getValue());
        assertEquals(IndexType.VAL, list.getIndex().getType());
    } catch (RecognitionException e) {
        fail(e.getMessage());
    }
}
@Test
public void testTableBodyMixedUid() {
    try {
        // Mixed indices: row is a literal value, column is a "->" reference.
        MetaData data = UidParser.parse("{row:3, column -> bad} as Search");
        assertNotNull(data);
        assertEquals("Search", data.getId());
        assertTrue(data instanceof TableBodyMetaData);
        TableBodyMetaData body = (TableBodyMetaData) data;
        // tbody was not specified; the parser fills in "1".
        assertEquals("1", body.getTbody().getValue());
        assertEquals(IndexType.VAL, body.getTbody().getType());
        assertEquals("3", body.getRow().getValue());
        assertEquals(IndexType.VAL, body.getRow().getType());
        assertEquals("bad", body.getColumn().getValue());
        assertEquals(IndexType.REF, body.getColumn().getType());
    } catch (RecognitionException e) {
        e.printStackTrace();
        fail(e.getMessage());
    }
}
/**
 * Parses the dependency-mapping text and feeds the results into this generator.
 *
 * @param text the mapping definition to parse
 * @param task the task the generated dependencies belong to
 * @throws ParserException if the text cannot be parsed or generation fails
 */
public void generateDependencies(String text, EGTask task) throws Exception {
    this.task = task;
    DependenciesLexer lex = new DependenciesLexer(new ANTLRStringStream(text));
    CommonTokenStream tokens = new CommonTokenStream(lex);
    DependenciesParser g = new DependenciesParser(tokens);
    try {
        g.setGenerator(this);
        g.prog();
    } catch (RecognitionException ex) {
        logger.error("Unable to load mapping task: " + ex.getMessage());
        throw new ParserException(ex);
    } catch (Exception e) {
        // Bug fix: the original nested try/catch let this outer handler
        // re-catch the ParserException thrown just above and wrap it a second
        // time, burying the real cause one level deeper. Sibling catch
        // clauses do not catch each other's throws, so each failure is now
        // wrapped exactly once.
        e.printStackTrace();
        logger.error(e.getLocalizedMessage());
        throw new ParserException(e);
    }
}
@Test
public void testTableBodyRefUid() {
    CharStream input = new ANTLRStringStream("{tbody : 1, row -> good, column -> bad} as Search");
    UdlParser parser = new UdlParser(new CommonTokenStream(new UdlLexer(input)));
    try {
        MetaData data = parser.uid();
        assertNotNull(data);
        assertEquals("Search", data.getId());
        assertTrue(data instanceof TableBodyMetaData);
        TableBodyMetaData body = (TableBodyMetaData) data;
        assertEquals("1", body.getTbody().getValue());
        assertEquals(IndexType.VAL, body.getTbody().getType());
        // "->" marks a reference index rather than a literal value.
        assertEquals("good", body.getRow().getValue());
        assertEquals(IndexType.REF, body.getRow().getType());
        assertEquals("bad", body.getColumn().getValue());
        assertEquals(IndexType.REF, body.getColumn().getType());
    } catch (RecognitionException e) {
        fail(e.getMessage());
    }
}
@Test
public void testTableBodyValUidNoId() {
    // No "as <id>" clause: the id is synthesized from the three indices.
    CharStream input = new ANTLRStringStream("{tbody : 1, row : 2, column : 3}");
    UdlParser parser = new UdlParser(new CommonTokenStream(new UdlLexer(input)));
    try {
        MetaData data = parser.uid();
        assertNotNull(data);
        assertEquals("_1_2_3", data.getId());
        assertTrue(data instanceof TableBodyMetaData);
        TableBodyMetaData body = (TableBodyMetaData) data;
        assertEquals("1", body.getTbody().getValue());
        assertEquals(IndexType.VAL, body.getTbody().getType());
        assertEquals("2", body.getRow().getValue());
        assertEquals(IndexType.VAL, body.getRow().getType());
        assertEquals("3", body.getColumn().getValue());
        assertEquals(IndexType.VAL, body.getColumn().getType());
    } catch (RecognitionException e) {
        fail(e.getMessage());
    }
}
@Test public void duplicatedAliasTestFrom() { String statement = "SELECT * FROM T1 MyAlias JOIN T2 AS MyAlias"; try { traverseStatement(statement); fail("Parsing statement " + statement + " should fail."); } catch (RecognitionException e) { assertTrue(e instanceof FailedPredicateException); LOG.debug("duplicatedAliasTestFrom(), exception: " + e); // walker.reportError(e); String errorMessage = queryUtil.getErrorMessage(e); LOG.debug(""); LOG.debug("duplicatedAliasTestFrom(), error message: " + errorMessage); assertTrue(e.toString().contains("more than once as alias in a from")); assertTrue(errorMessage.contains("more than once as alias in a from")); } catch (Exception e) { fail( "Parsing statement " + statement + " should fail with RecognitionException, but was: " + e.getClass()); } }
public static ILockingHandler createLock(String config) { try { LGenParser parser = parseConfig(config); switch (parser.getStore().getType()) { case LGenLexer.MONGODB: boolean useMongo2 = Boolean.parseBoolean( MultiValueConfigLoader.getConfig("MONGODB-lock.useVersion2", "false")); if (useMongo2) { return getLockStore( "rapture.lock.mongodb.MongoLockHandler2", parser.getConfig().getConfig()); } else { return getLockStore( "rapture.lock.mongodb.MongoLockHandler", parser.getConfig().getConfig()); } case LGenLexer.MEMORY: return getLockStore( "rapture.lock.memory.MemoryLockingHandler", parser.getConfig().getConfig()); case LGenLexer.DUMMY: return getLockStore( "rapture.lock.dummy.DummyLockHandler", parser.getConfig().getConfig()); case LGenLexer.REDIS: return getLockStore( "rapture.lock.redis.RedisLockHandler", parser.getConfig().getConfig()); case LGenLexer.ZOOKEEPER: return getLockStore( "rapture.lock.zookeeper.ZooKeeperLockHandler", parser.getConfig().getConfig()); case LGenLexer.ETCD: return getLockStore("rapture.lock.etcd.ETCDLockHandler", parser.getConfig().getConfig()); // TODO case LGenLexer.FILE: ? } } catch (RecognitionException e) { log.error("Error parsing config - " + e.getMessage()); } return null; }
public static void generate(File input, String pack) { System.out.println("Processing " + input); FileReader reader = null; FileWriter writer = null; try { // 1. Basic parsing step; building AST. reader = new FileReader(input); ANTLRv3Lexer lexer = new ANTLRv3Lexer(new ANTLRReaderStream(reader)); CommonTokenStream tokens = new CommonTokenStream(lexer); ANTLRv3Parser parser = new ANTLRv3Parser(tokens); ANTLRv3Parser.grammarDef_return r = parser.grammarDef(); CommonTree t = (CommonTree) r.getTree(); // System.out.println(t.toStringTree()); // 2. Verifying the AST. CommonTreeNodeStream nodes = new CommonTreeNodeStream(t); ANTLRv3Tree walker = new ANTLRv3Tree(nodes); walker.grammarDef(); // 3. Code generation step. Reader templatesIn = new InputStreamReader( ANTLRToFilter.class.getResourceAsStream( "/koopa/trees/antlr/filter/generator/filter.stg")); StringTemplateGroup templates = new StringTemplateGroup(templatesIn, DefaultTemplateLexer.class); nodes = new CommonTreeNodeStream(t); ANTLRv3TreeFilter filterMaker = new ANTLRv3TreeFilter(nodes); filterMaker.setTemplateLib(templates); String code = filterMaker.grammarDef(pack).toString(); // 4. Saving the result. String name = input.getName().replace(".g", "Filter.java"); // System.out.println(name); File output = new File(input.getParentFile(), name); System.out.println("Writing output to " + output); writer = new FileWriter(output); writer.append(code); writer.close(); System.out.println("Code generation complete."); } catch (IOException e) { e.printStackTrace(); System.exit(-1); } catch (RecognitionException e) { e.printStackTrace(); System.exit(-1); } catch (UnsupportedSyntaxException e) { e.printStackTrace(); System.exit(-1); } finally { try { if (reader != null) reader.close(); } catch (IOException e) { e.printStackTrace(); } try { if (writer != null) writer.close(); } catch (IOException e) { e.printStackTrace(); } } }