public static void main(String[] args) throws IOException {
    // Process command-line arguments.
    CliOptions cliOptions = new CliOptions();
    cliOptions.processArgs(css_, args);

    // Connect to the Cassandra server if a host argument was specified;
    // otherwise the client must connect explicitly via the "connect" CLI statement.
    if (css_.hostName != null) {
        connect(css_.hostName, css_.thriftPort);
    } else {
        cliClient_ = new CliClient(css_, null);
    }

    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);

    String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
    reader.setHistory(new History(new File(historyFile)));

    printBanner();

    String line;
    while ((line = reader.readLine(PROMPT + "> ")) != null) {
        processLine(line);
    }
}
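The method above wires jline's ConsoleReader to a persistent history file and then loops until readLine returns null on end-of-input. Below is a minimal, self-contained sketch of that same read-eval pattern, assuming the jline 0.9.x API used here; ReplSketch, the ".repl_history" file name, and the echo body are illustrative stand-ins, not part of the CLI.

import java.io.File;
import java.io.IOException;
import jline.ConsoleReader;
import jline.History;

public class ReplSketch {
    public static void main(String[] args) throws IOException {
        ConsoleReader reader = new ConsoleReader();
        reader.setBellEnabled(false);
        // Persist history across sessions, mirroring the HISTORYFILE pattern above.
        reader.setHistory(new History(new File(System.getProperty("user.home"), ".repl_history")));
        String line;
        // readLine returns null on EOF (Ctrl-D), which cleanly ends the loop.
        while ((line = reader.readLine("repl> ")) != null) {
            System.out.println("echo: " + line); // stand-in for processLine(line)
        }
    }
}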
public int processCmd(String cmd) throws TException, HiveException {
    SessionState ss = SessionState.get();
    ss.setiscli(true);

    String cmd_trimmed = cmd.trim();
    String[] tokens = cmd_trimmed.split("\\s+");
    String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
    int ret = 0;

    // "delete" is ambiguous: it may remove a resource (jar/file) or delete table rows.
    if (tokens[0].equalsIgnoreCase("delete")) {
        Vector<String> nexttoken = new Vector<String>();
        nexttoken.add("jar");
        nexttoken.add("file");
        nexttoken.add("from");
        if (tokens.length < 2 || !nexttoken.contains(tokens[1].toLowerCase())) {
            String errorMessage = "\nto delete a resource:\n"
                + "Usage: delete [FILE|JAR] <value> [<value>]*\n"
                + "to delete table rows:\n"
                + "Usage: delete from tableName [where searchCondition]";
            console.printError(errorMessage);
            ret = 1;
            return ret;
        }
    }

    if (tokens[0].equalsIgnoreCase("dfs") || tokens[0].equalsIgnoreCase("zktest")) {
        String errorMessage = "\ntdw hive does not support the "
            + tokens[0].toLowerCase() + " operation\n";
        throw new HiveException(errorMessage);
    }

    if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {
        System.exit(0);
    } else if (cmd_trimmed.startsWith("!")) {
        // Shell escape: run everything after '!' as an OS command and pump its streams.
        String shell_cmd = cmd_trimmed.substring(1);
        try {
            Process executor = Runtime.getRuntime().exec(shell_cmd);
            StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, ss.out);
            StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, ss.err);
            outPrinter.start();
            errPrinter.start();
            ret = executor.waitFor();
            if (ret != 0) {
                console.printError("Command failed with exit code = " + ret);
            }
        } catch (Exception e) {
            console.printError("Exception raised from Shell command " + e.getLocalizedMessage(),
                org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
        }
    } else if (tokens[0].toLowerCase().equals("list")) {
        SessionState.ResourceType t;
        if (tokens.length < 2 || (t = SessionState.find_resource_type(tokens[1])) == null) {
            console.printError("Usage: list ["
                + StringUtils.join(SessionState.ResourceType.values(), "|")
                + "] [<value> [<value>]*]");
            ret = 1;
        } else {
            List<String> filter = null;
            if (tokens.length >= 3) {
                // Copy exactly the filter arguments; the original in-place
                // System.arraycopy left stale trailing tokens in the filter list.
                filter = Arrays.asList(Arrays.copyOfRange(tokens, 2, tokens.length));
            }
            Set<String> s = ss.list_resource(t, filter);
            if (s != null && !s.isEmpty()) {
                ss.out.println(StringUtils.join(s, "\n"));
            }
        }
    } else {
        CommandProcessor proc = CommandProcessorFactory.get(tokens);
        if (proc != null) {
            if (proc instanceof Driver) {
                Driver qp = (Driver) proc;
                PrintStream out = ss.out;
                long start = System.currentTimeMillis();

                try {
                    ret = qp.run(cmd);
                } catch (Exception e1) {
                    e1.printStackTrace();
                }
                if (ret != 0) {
                    qp.close();
                    return ret;
                }

                // Stream query results to the session's output.
                Vector<String> res = new Vector<String>();
                try {
                    while (qp.getResults(res)) {
                        for (String r : res) {
                            out.println(r);
                        }
                        res.clear();
                        if (out.checkError()) {
                            break;
                        }
                    }
                } catch (IOException e) {
                    console.printError("Failed with exception " + e.getClass().getName()
                        + ":" + e.getMessage(),
                        "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
                    ret = 1;
                }

                int cret = qp.close();
                if (ret == 0) {
                    ret = cret;
                }

                long end = System.currentTimeMillis();
                if (end > start) {
                    double timeTaken = (double) (end - start) / 1000.0;
                    console.printInfo("Time taken: " + timeTaken + " seconds", null);
                }
            } else {
                // Non-Driver processors receive the command without its first token.
                try {
                    ret = proc.run(cmd_1);
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
    return ret;
}
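The "list" branch originally shifted the tokens array in place with System.arraycopy and then wrapped the whole array, which leaves stale trailing entries in the filter. The standalone check below demonstrates the difference between the two approaches; FilterCopySketch is illustrative and not part of Hive.

import java.util.Arrays;
import java.util.List;

public class FilterCopySketch {
    public static void main(String[] args) {
        String[] tokens = {"list", "jar", "a", "b"};

        // Old approach: the array keeps its original length, so the
        // unshifted tail survives and duplicates the filter values.
        String[] shifted = tokens.clone();
        System.arraycopy(shifted, 2, shifted, 0, shifted.length - 2);
        System.out.println(Arrays.asList(shifted));   // prints [a, b, a, b]

        // Fixed approach: copy exactly the filter arguments.
        List<String> filter = Arrays.asList(Arrays.copyOfRange(tokens, 2, tokens.length));
        System.out.println(filter);                   // prints [a, b]
    }
}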
public static void main(String[] args) throws Exception {
    Config.hadoop_mode = false;
    // Needed for the mrql.flink script: a single "args" argument packs the
    // real arguments into one '!'-separated string.
    if (args.length == 2 && args[0].equals("args"))
        args = args[1].substring(1).split("!");

    for (String arg : args) {
        Config.hadoop_mode |= arg.equals("-local") || arg.equals("-dist");
        Config.bsp_mode    |= arg.equals("-bsp");
        Config.spark_mode  |= arg.equals("-spark");
        Config.flink_mode  |= arg.equals("-flink");
    }
    Config.map_reduce_mode = !Config.bsp_mode && !Config.spark_mode && !Config.flink_mode;
    initialize_evaluator();

    if (Config.hadoop_mode) {
        conf = Evaluator.evaluator.new_configuration();
        GenericOptionsParser gop = new GenericOptionsParser(conf, args);
        conf = gop.getConfiguration();
        args = gop.getRemainingArgs();
    }
    Config.parse_args(args, conf);
    Config.hadoop_mode = Config.local_mode || Config.distributed_mode;

    // Unless verbose output was requested, quiet all loggers down to WARN.
    if (!Config.info) {
        for (Enumeration en = LogManager.getCurrentLoggers(); en.hasMoreElements(); )
            ((Logger) en.nextElement()).setLevel(Level.WARN);
        LogManager.getRootLogger().setLevel(Level.WARN);
    }
    Evaluator.evaluator.init(conf);
    new TopLevel();

    // Print a banner describing the evaluation mode.
    System.out.print("Apache MRQL version " + version + " (");
    if (Config.compile_functional_arguments)
        System.out.print("compiled ");
    else
        System.out.print("interpreted ");
    if (Config.hadoop_mode) {
        if (Config.local_mode)
            System.out.print("local ");
        else if (Config.distributed_mode)
            System.out.print("distributed ");
        if (Config.spark_mode)
            System.out.println("Spark mode using " + Config.nodes + " tasks)");
        else if (Config.flink_mode)
            System.out.println("Flink mode using " + Config.nodes + " tasks)");
        else if (Config.bsp_mode)
            System.out.println("Hama BSP mode over " + Config.nodes + " BSP tasks)");
        else if (Config.nodes > 0)
            System.out.println("Hadoop MapReduce mode with " + Config.nodes + " reducers)");
        else if (!Config.local_mode)
            System.out.println("Hadoop MapReduce mode with 1 reducer, use -nodes to change it)");
        else
            System.out.println("Hadoop MapReduce mode)");
    } else if (Config.bsp_mode)
        System.out.println("in-memory BSP mode)");
    else
        System.out.println("in-memory Java mode)");

    if (Config.interactive) {
        // Interactive REPL: read statements until "quit"/"exit" or EOF.
        System.out.println("Type quit to exit");
        ConsoleReader reader = new ConsoleReader();
        reader.setBellEnabled(false);
        History history = new History(new File(System.getProperty("user.home") + "/.mrqlhistory"));
        reader.setHistory(history);
        reader.setUseHistory(false);
        try {
            loop: while (true) {
                String line = "";
                String s = "";
                try {
                    if (Config.hadoop_mode && Config.bsp_mode)
                        Config.write(Plan.conf);
                    // Accumulate input lines until the latest line contains a ';'.
                    do {
                        s = reader.readLine("> ");
                        if (s != null && (s.equals("quit") || s.equals("exit")))
                            break loop;
                        if (s != null)
                            line += " " + s;
                    } while (s == null || s.indexOf(";") <= 0);
                    line = line.substring(1);
                    history.addToHistory(line);
                    parser = new MRQLParser(new MRQLLex(new StringReader(line)));
                    MRQLLex.reset();
                    parser.parse();
                } catch (EOFException x) {
                    break;
                } catch (Exception x) {
                    if (x.getMessage() != null)
                        System.out.println(x);
                } catch (Error x) {
                    System.out.println(x);
                }
            }
        } finally {
            if (Config.hadoop_mode) {
                Plan.clean();
                Evaluator.evaluator.shutdown(Plan.conf);
            }
            if (Config.compile_functional_arguments)
                Compiler.clean();
        }
    } else {
        try {
            if (Config.hadoop_mode && Config.bsp_mode)
                Config.write(Plan.conf);
            try {
                parser = new MRQLParser(new MRQLLex(new FileInputStream(query_file)));
            } catch (Exception e) {
                // The query file may live in HDFS rather than on the local file system.
                Path path = new Path(query_file);
                FileSystem fs = path.getFileSystem(conf);
                parser = new MRQLParser(new MRQLLex(fs.open(path)));
            }
            parser.parse();
        } finally {
            if (Config.hadoop_mode) {
                Plan.clean();
                Evaluator.evaluator.shutdown(Plan.conf);
            }
            if (Config.compile_functional_arguments)
                Compiler.clean();
        }
    }
}
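The inner do-while of the REPL above accumulates input lines until the most recent line contains a ';' past position 0, then strips the leading padding space before parsing. The sketch below isolates that termination logic with a plain BufferedReader so it runs without jline; StatementReader and handle() are illustrative names, and handle() stands in for parser.parse().

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

public class StatementReader {
    public static void main(String[] args) throws IOException {
        BufferedReader in = new BufferedReader(new InputStreamReader(System.in));
        StringBuilder stmt = new StringBuilder();
        String s;
        System.out.print("> ");
        while ((s = in.readLine()) != null) {
            if (s.equals("quit") || s.equals("exit"))
                break;
            stmt.append(' ').append(s);
            // Like the loop above, a ';' past position 0 of the latest line
            // ends the statement.
            if (s.indexOf(";") > 0) {
                handle(stmt.substring(1)); // drop the leading padding space
                stmt.setLength(0);
            }
            System.out.print("> ");
        }
    }

    static void handle(String statement) {
        System.out.println("parsed: " + statement); // stand-in for parser.parse()
    }
}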
public static void main(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
        System.exit(1);
    }

    SessionState.initHiveLog4j();

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        System.exit(3);
    }

    if (!oproc.process_stage2(ss)) {
        System.exit(2);
    }

    // Apply command-line property overrides to the session configuration.
    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
        // Load auxiliary jars into the current thread's class loader
        // instead of shipping them to a job shell.
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }

    SessionState.start(ss);

    CliDriver cli = new CliDriver();

    Hive hive = Hive.get();
    String username = ss.getUserName();
    String passwd = ss.passwd;
    if (!hive.isAUser(username, passwd)) {
        System.out.println("Incorrect user name or password!");
        System.exit(1); // the original exited with 0 here, masking the auth failure
    } else {
        System.out.println("Connected to TDW successfully!");
    }

    if (ss.getDbName() == null) {
        ss.setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME);
    }

    // -e: execute a single command string and exit.
    if (ss.execString != null) {
        System.exit(cli.processLine(ss.execString));
    }

    // -f: execute the statements in a script file and exit.
    try {
        if (ss.fileName != null) {
            System.exit(cli.processReader(new BufferedReader(new FileReader(ss.fileName))));
        }
    } catch (FileNotFoundException e) {
        System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        System.exit(3);
    }

    // Interactive mode: set up line editing, tab completion, and history.
    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);

    List<SimpleCompletor> completors = new LinkedList<SimpleCompletor>();
    completors.add(new SimpleCompletor(
        new String[] {"set", "from", "create", "load", "describe", "quit", "exit"}));
    reader.addCompletor(new ArgumentCompletor(completors));

    String line;
    final String HISTORYFILE = ".hivehistory";
    String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
    reader.setHistory(new History(new File(historyFile)));

    int ret = 0;
    String prefix = "";
    String curPrompt = prompt;
    while ((line = reader.readLine(curPrompt + "> ")) != null) {
        if (!prefix.equals("")) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            // A statement is complete once a line ends with an unescaped ';'.
            line = prefix + line;
            ret = cli.processLine(line);
            prefix = "";
            curPrompt = prompt;
        } else {
            // Otherwise keep accumulating and show the continuation prompt.
            prefix = prefix + line;
            curPrompt = prompt2;
        }
    }
    System.exit(ret);
}
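The interactive branch above registers a SimpleCompletor wrapped in an ArgumentCompletor so that tab completion applies to every whitespace-separated word, not just the first. Below is a standalone sketch of that wiring, assuming the same jline 0.9.x API; CompletionSketch and its prompt are illustrative, while the keyword list matches the one registered by the CLI.

import java.io.IOException;
import java.util.LinkedList;
import java.util.List;
import jline.ArgumentCompletor;
import jline.ConsoleReader;
import jline.SimpleCompletor;

public class CompletionSketch {
    public static void main(String[] args) throws IOException {
        ConsoleReader reader = new ConsoleReader();
        // ArgumentCompletor applies the keyword completor to each
        // whitespace-separated argument of the input line.
        List<SimpleCompletor> completors = new LinkedList<SimpleCompletor>();
        completors.add(new SimpleCompletor(
            new String[] {"set", "from", "create", "load", "describe", "quit", "exit"}));
        reader.addCompletor(new ArgumentCompletor(completors));

        String line;
        while ((line = reader.readLine("sketch> ")) != null) {
            System.out.println("read: " + line);
        }
    }
}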