public static void main(String[] args) throws IOException {
    // process command line args
    CliOptions cliOptions = new CliOptions();
    cliOptions.processArgs(css_, args);

    // connect to cassandra server if host argument specified.
    if (css_.hostName != null) {
        connect(css_.hostName, css_.thriftPort);
    } else {
        // If not, client must connect explicitly using the "connect" CLI statement.
        cliClient_ = new CliClient(css_, null);
    }

    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);

    String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
    reader.setHistory(new History(new File(historyFile)));

    printBanner();

    String line;
    while ((line = reader.readLine(PROMPT + "> ")) != null) {
        processLine(line);
    }
}
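// ---------------------------------------------------------------------------
// The Cassandra CLI above follows the common JLine 1.x read-eval-print shape:
// a ConsoleReader with a file-backed History, looping until readLine() returns
// null on EOF. A minimal self-contained sketch of that pattern follows; the
// class name, history file name, prompt, and handleLine() body are
// illustrative assumptions, not taken from the project.
// ---------------------------------------------------------------------------
import java.io.File;
import java.io.IOException;
import jline.ConsoleReader;
import jline.History;

public class MinimalReplSketch {
    public static void main(String[] args) throws IOException {
        ConsoleReader reader = new ConsoleReader();
        reader.setBellEnabled(false);
        // persist command history across sessions, as the CLIs here do
        File historyFile = new File(System.getProperty("user.home"), ".minimal_repl_history");
        reader.setHistory(new History(historyFile));
        String line;
        // readLine() returns null on EOF (e.g. Ctrl-D), which ends the loop
        while ((line = reader.readLine("repl> ")) != null) {
            handleLine(line);
        }
    }

    private static void handleLine(String line) {
        // placeholder for real command processing
        System.out.println("got: " + line);
    }
}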
@SuppressWarnings("unchecked")
void run() throws IOException {
    inConsole = true;
    myCommands = buildMyCommands();
    if (cl.getCommand() == null) {
        System.out.println("Welcome to Hedwig!");
        System.out.println("JLine support is enabled");
        console = new ConsoleReader();
        JLineHedwigCompletor completor = new JLineHedwigCompletor(admin);
        console.addCompletor(completor);

        // load history file
        History history = new History();
        File file = new File(System.getProperty("hw.history",
                new File(System.getProperty("user.home"), HW_HISTORY_FILE).toString()));
        if (LOG.isDebugEnabled()) {
            LOG.debug("History file is " + file.toString());
        }
        history.setHistoryFile(file);
        // set history to console reader
        console.setHistory(history);
        // load history from history file
        history.moveToFirstEntry();
        while (history.next()) {
            String entry = history.current();
            if (!entry.equals("")) {
                addToHistory(commandCount, entry);
            }
            commandCount++;
        }
        System.out.println("JLine history support is enabled");

        String line;
        while ((line = console.readLine(getPrompt())) != null) {
            executeLine(line);
            history.addToHistory(line);
        }
    }
    inConsole = false;
    processCmd(cl);
    try {
        myCommands.get(EXIT).runCmd(new String[0]);
    } catch (Exception e) {
    }
}
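// ---------------------------------------------------------------------------
// The Hedwig console above registers a JLineHedwigCompletor for tab
// completion. As a hedged sketch of what such a completor looks like against
// JLine 1.x's Completor interface: complete() adds matches to `candidates`
// and returns the buffer index the completions replace from (-1 when there is
// no match). The command set below is hypothetical, not Hedwig's.
// ---------------------------------------------------------------------------
import java.util.List;
import jline.Completor;

public class SketchCommandCompletor implements Completor {
    // hypothetical command vocabulary for illustration only
    private static final String[] COMMANDS = { "pub", "sub", "unsub", "exit" };

    @SuppressWarnings({ "rawtypes", "unchecked" })
    public int complete(String buffer, int cursor, List candidates) {
        String prefix = buffer == null ? "" : buffer.substring(0, cursor);
        for (String cmd : COMMANDS) {
            if (cmd.startsWith(prefix)) {
                candidates.add(cmd);
            }
        }
        // complete from the start of the buffer; JLine replaces from this index
        return candidates.isEmpty() ? -1 : 0;
    }
}
// Wired in the same way as above: console.addCompletor(new SketchCommandCompletor());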
public static void main(String[] args) throws Exception {
    Config.hadoop_mode = false;
    if (args.length == 2 && args[0].equals("args")) // needed for the mrql.flink script
        args = args[1].substring(1).split("!");
    for (String arg : args) {
        Config.hadoop_mode |= arg.equals("-local") || arg.equals("-dist");
        Config.bsp_mode |= arg.equals("-bsp");
        Config.spark_mode |= arg.equals("-spark");
        Config.flink_mode |= arg.equals("-flink");
    }
    Config.map_reduce_mode = !Config.bsp_mode && !Config.spark_mode && !Config.flink_mode;
    initialize_evaluator();
    if (Config.hadoop_mode) {
        conf = Evaluator.evaluator.new_configuration();
        GenericOptionsParser gop = new GenericOptionsParser(conf, args);
        conf = gop.getConfiguration();
        args = gop.getRemainingArgs();
    }
    Config.parse_args(args, conf);
    Config.hadoop_mode = Config.local_mode || Config.distributed_mode;
    if (!Config.info) {
        for (Enumeration en = LogManager.getCurrentLoggers(); en.hasMoreElements(); )
            ((Logger) en.nextElement()).setLevel(Level.WARN);
        LogManager.getRootLogger().setLevel(Level.WARN);
    }
    Evaluator.evaluator.init(conf);
    new TopLevel();
    System.out.print("Apache MRQL version " + version + " (");
    if (Config.compile_functional_arguments)
        System.out.print("compiled ");
    else
        System.out.print("interpreted ");
    if (Config.hadoop_mode) {
        if (Config.local_mode)
            System.out.print("local ");
        else if (Config.distributed_mode)
            System.out.print("distributed ");
        if (Config.spark_mode)
            System.out.println("Spark mode using " + Config.nodes + " tasks)");
        else if (Config.flink_mode)
            System.out.println("Flink mode using " + Config.nodes + " tasks)");
        else if (Config.bsp_mode)
            System.out.println("Hama BSP mode over " + Config.nodes + " BSP tasks)");
        else if (Config.nodes > 0)
            System.out.println("Hadoop MapReduce mode with " + Config.nodes + " reducers)");
        else if (!Config.local_mode)
            System.out.println("Hadoop MapReduce mode with 1 reducer, use -nodes to change it)");
        else
            System.out.println("Hadoop MapReduce mode)");
    } else if (Config.bsp_mode)
        System.out.println("in-memory BSP mode)");
    else
        System.out.println("in-memory Java mode)");
    if (Config.interactive) {
        System.out.println("Type quit to exit");
        ConsoleReader reader = new ConsoleReader();
        reader.setBellEnabled(false);
        History history = new History(new File(System.getProperty("user.home") + "/.mrqlhistory"));
        reader.setHistory(history);
        reader.setUseHistory(false);
        try {
            loop: while (true) {
                String line = "";
                String s = "";
                try {
                    if (Config.hadoop_mode && Config.bsp_mode)
                        Config.write(Plan.conf);
                    // accumulate input lines until the statement terminator ';' is seen
                    do {
                        s = reader.readLine("> ");
                        if (s != null && (s.equals("quit") || s.equals("exit")))
                            break loop;
                        if (s != null)
                            line += " " + s;
                    } while (s == null || s.indexOf(";") <= 0);
                    line = line.substring(1);
                    history.addToHistory(line);
                    parser = new MRQLParser(new MRQLLex(new StringReader(line)));
                    MRQLLex.reset();
                    parser.parse();
                } catch (EOFException x) {
                    break;
                } catch (Exception x) {
                    if (x.getMessage() != null)
                        System.out.println(x);
                } catch (Error x) {
                    System.out.println(x);
                }
            }
        } finally {
            if (Config.hadoop_mode) {
                Plan.clean();
                Evaluator.evaluator.shutdown(Plan.conf);
            }
            if (Config.compile_functional_arguments)
                Compiler.clean();
        }
    } else
        try {
            if (Config.hadoop_mode && Config.bsp_mode)
                Config.write(Plan.conf);
            try {
                parser = new MRQLParser(new MRQLLex(new FileInputStream(query_file)));
            } catch (Exception e) {
                // when the query file is in HDFS
                Path path = new Path(query_file);
                FileSystem fs = path.getFileSystem(conf);
                parser = new MRQLParser(new MRQLLex(fs.open(path)));
            }
            parser.parse();
        } finally {
            if (Config.hadoop_mode) {
                Plan.clean();
                Evaluator.evaluator.shutdown(Plan.conf);
            }
            if (Config.compile_functional_arguments)
                Compiler.clean();
        }
}
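// ---------------------------------------------------------------------------
// The MRQL interactive branch above accumulates input lines until the
// statement terminator ';' appears, then feeds the whole buffer to the
// parser. A sketch of that accumulation in isolation, assuming a plain JLine
// ConsoleReader; simplified in that EOF returns null here, whereas the loop
// above relies on an EOFException, and quit/exit handling is omitted.
// ---------------------------------------------------------------------------
import java.io.IOException;
import jline.ConsoleReader;

public class StatementReaderSketch {
    // Reads lines until one contains ';', returning the accumulated statement,
    // or null on EOF. Mirrors the do/while condition used by the loop above:
    // a line whose only ';' is at position 0 is still treated as incomplete.
    static String readStatement(ConsoleReader reader) throws IOException {
        String line = "";
        String s;
        do {
            s = reader.readLine("> ");
            if (s == null) {
                return null; // EOF ends the session
            }
            line += " " + s;
        } while (s.indexOf(";") <= 0);
        return line.substring(1); // drop the leading separator space
    }
}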
/**
 * The Main-Class for the Pig Jar that will provide a shell and setup
 * a classpath appropriate for executing Jar files.
 *
 * @param args
 *            -jar can be used to add additional jar files (colon separated).
 *            - will start a shell.
 *            -e will execute the rest of the command line as if it was input to the shell.
 * @throws IOException
 */
public static void main(String[] args) {
    int rc = 1;
    Properties properties = new Properties();
    PropertiesUtil.loadPropertiesFromFile(properties);

    boolean verbose = false;
    boolean gruntCalled = false;
    String logFileName = null;

    try {
        BufferedReader pin = null;
        boolean debug = false;
        boolean dryrun = false;
        ArrayList<String> params = new ArrayList<String>();
        ArrayList<String> paramFiles = new ArrayList<String>();
        HashSet<String> optimizerRules = new HashSet<String>();

        CmdLineParser opts = new CmdLineParser(args);
        opts.registerOpt('4', "log4jconf", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('b', "brief", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('c', "cluster", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('d', "debug", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('e', "execute", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('f', "file", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('h', "help", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('i', "version", CmdLineParser.ValueExpected.OPTIONAL);
        opts.registerOpt('j', "jar", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('l', "logfile", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('m', "param_file", CmdLineParser.ValueExpected.OPTIONAL);
        opts.registerOpt('o', "hod", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('p', "param", CmdLineParser.ValueExpected.OPTIONAL);
        opts.registerOpt('r', "dryrun", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('t', "optimizer_off", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('v', "verbose", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('w', "warning", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('x', "exectype", CmdLineParser.ValueExpected.REQUIRED);
        opts.registerOpt('F', "stop_on_failure", CmdLineParser.ValueExpected.NOT_ACCEPTED);
        opts.registerOpt('M', "no_multiquery", CmdLineParser.ValueExpected.NOT_ACCEPTED);

        ExecMode mode = ExecMode.UNKNOWN;
        String file = null;
        ExecType execType = ExecType.MAPREDUCE;
        String execTypeString = properties.getProperty("exectype");
        if (execTypeString != null && execTypeString.length() > 0) {
            execType = PigServer.parseExecType(execTypeString);
        }
        String cluster = "local";
        String clusterConfigured = properties.getProperty("cluster");
        if (clusterConfigured != null && clusterConfigured.length() > 0) {
            cluster = clusterConfigured;
        }

        // by default warning aggregation is on
        properties.setProperty("aggregate.warning", "" + true);
        // by default multiquery optimization is on
        properties.setProperty("opt.multiquery", "" + true);
        // by default we keep going on error on the backend
        properties.setProperty("stop.on.failure", "" + false);

        char opt;
        while ((opt = opts.getNextOpt()) != CmdLineParser.EndOfOpts) {
            switch (opt) {
            case '4':
                String log4jconf = opts.getValStr();
                if (log4jconf != null) {
                    properties.setProperty(LOG4J_CONF, log4jconf);
                }
                break;

            case 'b':
                properties.setProperty(BRIEF, "true");
                break;

            case 'c':
                // Needed a way to specify the cluster to run the MR job on
                // Bug 831708 - fixed
                String clusterParameter = opts.getValStr();
                if (clusterParameter != null && clusterParameter.length() > 0) {
                    cluster = clusterParameter;
                }
                break;

            case 'd':
                String logLevel = opts.getValStr();
                if (logLevel != null) {
                    properties.setProperty(DEBUG, logLevel);
                }
                debug = true;
                break;

            case 'e':
                mode = ExecMode.STRING;
                break;

            case 'f':
                mode = ExecMode.FILE;
                file = opts.getValStr();
                break;

            case 'F':
                properties.setProperty("stop.on.failure", "" + true);
                break;

            case 'h':
                usage();
                return;

            case 'i':
                System.out.println(getVersionString());
                return;

            case 'j':
                String jarsString = opts.getValStr();
                if (jarsString != null) {
                    properties.setProperty(JAR, jarsString);
                }
                break;

            case 'l':
                // call to method that validates the path to the log file
                // and sets up the file to store the client side log file
                String logFileParameter = opts.getValStr();
                if (logFileParameter != null && logFileParameter.length() > 0) {
                    logFileName = validateLogFile(logFileParameter, null);
                } else {
                    logFileName = validateLogFile(logFileName, null);
                }
                properties.setProperty("pig.logfile", logFileName);
                break;

            case 'm':
                paramFiles.add(opts.getValStr());
                break;

            case 'M':
                // turns off multiquery optimization
                properties.setProperty("opt.multiquery", "" + false);
                break;

            case 'o':
                // TODO sgroschupf: using system properties is always a very bad idea
                String gateway = System.getProperty("ssh.gateway");
                if (gateway == null || gateway.length() == 0) {
                    properties.setProperty("hod.server", "local");
                } else {
                    properties.setProperty("hod.server", System.getProperty("ssh.gateway"));
                }
                break;

            case 'p':
                String val = opts.getValStr();
                params.add(val);
                break;

            case 'r':
                // currently only used for parameter substitution
                // will be extended in the future
                dryrun = true;
                break;

            case 't':
                optimizerRules.add(opts.getValStr());
                break;

            case 'v':
                properties.setProperty(VERBOSE, "" + true);
                verbose = true;
                break;

            case 'w':
                properties.setProperty("aggregate.warning", "" + false);
                break;

            case 'x':
                try {
                    execType = PigServer.parseExecType(opts.getValStr());
                } catch (IOException e) {
                    throw new RuntimeException("ERROR: Unrecognized exectype.", e);
                }
                break;

            default: {
                Character cc = new Character(opt);
                throw new AssertionError("Unhandled option " + cc.toString());
            }
            }
        }

        // configure logging
        configureLog4J(properties);
        // create the context with the parameter
        PigContext pigContext = new PigContext(execType, properties);

        if (logFileName == null) {
            logFileName = validateLogFile(null, null);
        }
        pigContext.getProperties().setProperty("pig.logfile", logFileName);

        if (optimizerRules.size() > 0) {
            pigContext.getProperties().setProperty(
                    "pig.optimizer.rules", ObjectSerializer.serialize(optimizerRules));
        }

        LogicalPlanBuilder.classloader = pigContext.createCl(null);

        // construct the parameter substitution preprocessor
        Grunt grunt = null;
        BufferedReader in;
        String substFile = null;
        switch (mode) {
        case FILE: {
            // Run, using the provided file as a pig file
            in = new BufferedReader(new FileReader(file));

            // run parameter substitution preprocessor first
            substFile = file + ".substituted";
            pin = runParamPreprocessor(in, params, paramFiles, substFile, debug || dryrun);
            if (dryrun) {
                log.info("Dry run completed. Substituted pig script is at " + substFile);
                return;
            }

            logFileName = validateLogFile(logFileName, file);
            pigContext.getProperties().setProperty("pig.logfile", logFileName);

            // Set job name based on name of the script
            pigContext.getProperties().setProperty(
                    PigContext.JOB_NAME, "PigLatin:" + new File(file).getName());

            if (!debug) {
                new File(substFile).deleteOnExit();
            }

            grunt = new Grunt(pin, pigContext);
            gruntCalled = true;
            int results[] = grunt.exec();
            rc = getReturnCodeForStats(results);
            return;
        }

        case STRING: {
            // Gather up all the remaining arguments into a string and pass them into grunt.
            StringBuffer sb = new StringBuffer();
            String remainders[] = opts.getRemainingArgs();
            for (int i = 0; i < remainders.length; i++) {
                if (i != 0) sb.append(' ');
                sb.append(remainders[i]);
            }
            in = new BufferedReader(new StringReader(sb.toString()));
            grunt = new Grunt(in, pigContext);
            gruntCalled = true;
            int results[] = grunt.exec();
            rc = getReturnCodeForStats(results);
            return;
        }

        default:
            break;
        }

        // If we're here, we don't know yet what they want. They may have just
        // given us a jar to execute, they might have given us a pig script to
        // execute, or they might have given us a dash (or nothing) which means
        // to run grunt interactive.
        String remainders[] = opts.getRemainingArgs();
        if (remainders == null) {
            // Interactive
            mode = ExecMode.SHELL;
            ConsoleReader reader = new ConsoleReader(System.in, new OutputStreamWriter(System.out));
            reader.setDefaultPrompt("grunt> ");
            final String HISTORYFILE = ".pig_history";
            String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
            reader.setHistory(new History(new File(historyFile)));
            ConsoleReaderInputStream inputStream = new ConsoleReaderInputStream(reader);
            grunt = new Grunt(new BufferedReader(new InputStreamReader(inputStream)), pigContext);
            grunt.setConsoleReader(reader);
            gruntCalled = true;
            grunt.run();
            rc = 0;
            return;
        } else {
            // They have a pig script they want us to run.
            if (remainders.length > 1) {
                throw new RuntimeException(
                        "You can only run one pig script at a time from the command line.");
            }
            mode = ExecMode.FILE;
            in = new BufferedReader(new FileReader(remainders[0]));

            // run parameter substitution preprocessor first
            substFile = remainders[0] + ".substituted";
            pin = runParamPreprocessor(in, params, paramFiles, substFile, debug || dryrun);
            if (dryrun) {
                log.info("Dry run completed. Substituted pig script is at " + substFile);
                return;
            }

            logFileName = validateLogFile(logFileName, remainders[0]);
            pigContext.getProperties().setProperty("pig.logfile", logFileName);

            if (!debug) {
                new File(substFile).deleteOnExit();
            }

            // Set job name based on name of the script
            pigContext.getProperties().setProperty(
                    PigContext.JOB_NAME, "PigLatin:" + new File(remainders[0]).getName());

            grunt = new Grunt(pin, pigContext);
            gruntCalled = true;
            int[] results = grunt.exec();
            rc = getReturnCodeForStats(results);
            return;
        }
        // Per Utkarsh and Chris, invocation of jar files via pig is deprecated.
    } catch (ParseException e) {
        usage();
        rc = 2;
    } catch (NumberFormatException e) {
        usage();
        rc = 2;
    } catch (PigException pe) {
        if (pe.retriable()) {
            rc = 1;
        } else {
            rc = 2;
        }
        if (!gruntCalled) {
            LogUtils.writeLog(pe, logFileName, log, verbose);
        }
    } catch (Throwable e) {
        rc = 2;
        if (!gruntCalled) {
            LogUtils.writeLog(e, logFileName, log, verbose);
        }
    } finally {
        // clear temp files
        FileLocalizer.deleteTempFiles();
        PerformanceTimerFactory.getPerfTimerFactory().dumpTimers();
        System.exit(rc);
    }
}
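// ---------------------------------------------------------------------------
// Grunt reads from an InputStream, so the interactive branch above adapts the
// ConsoleReader through jline's ConsoleReaderInputStream. A hand-rolled sketch
// of the same adapter idea, assuming only the JLine 1.x readLine() call: each
// completed line is surfaced as bytes followed by '\n'. This is an
// illustration of the technique, not Pig's or JLine's implementation.
// ---------------------------------------------------------------------------
import java.io.IOException;
import java.io.InputStream;
import jline.ConsoleReader;

public class LineReaderInputStream extends InputStream {
    private final ConsoleReader reader;
    private byte[] buf = new byte[0];
    private int pos = 0;

    public LineReaderInputStream(ConsoleReader reader) {
        this.reader = reader;
    }

    @Override
    public int read() throws IOException {
        if (pos >= buf.length) {
            String line = reader.readLine(); // null on EOF
            if (line == null) {
                return -1;
            }
            // re-attach the newline the reader stripped; default platform charset
            buf = (line + "\n").getBytes();
            pos = 0;
        }
        return buf[pos++] & 0xff;
    }
}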
public static void main(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
        System.exit(1);
    }

    SessionState.initHiveLog4j();

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
        ss.out = new PrintStream(System.out, true, "UTF-8");
        ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
        System.exit(3);
    }

    if (!oproc.process_stage2(ss)) {
        System.exit(2);
    }

    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
        conf.set((String) item.getKey(), (String) item.getValue());
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
        // augment the classloader with any auxiliary jars from the configuration
        ClassLoader loader = conf.getClassLoader();
        String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
        if (StringUtils.isNotBlank(auxJars)) {
            loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        }
        conf.setClassLoader(loader);
        Thread.currentThread().setContextClassLoader(loader);
    }

    SessionState.start(ss);

    CliDriver cli = new CliDriver();

    Hive hive = Hive.get();
    String username = ss.getUserName();
    String passwd = ss.passwd;
    if (!hive.isAUser(username, passwd)) {
        System.out.println("User or passwd is wrong!");
        System.exit(0);
    } else {
        System.out.println("Connect to TDW successfully!");
    }

    if (ss.getDbName() == null) {
        ss.setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME);
    }

    if (ss.execString != null) {
        System.exit(cli.processLine(ss.execString));
    }

    try {
        if (ss.fileName != null) {
            System.exit(cli.processReader(new BufferedReader(new FileReader(ss.fileName))));
        }
    } catch (FileNotFoundException e) {
        System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
        System.exit(3);
    }

    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);

    List<SimpleCompletor> completors = new LinkedList<SimpleCompletor>();
    completors.add(new SimpleCompletor(
            new String[] { "set", "from", "create", "load", "describe", "quit", "exit" }));
    reader.addCompletor(new ArgumentCompletor(completors));

    String line;
    final String HISTORYFILE = ".hivehistory";
    String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
    reader.setHistory(new History(new File(historyFile)));
    int ret = 0;

    String prefix = "";
    String curPrompt = prompt;
    while ((line = reader.readLine(curPrompt + "> ")) != null) {
        if (!prefix.equals("")) {
            prefix += '\n';
        }
        if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
            // a complete statement: run it and reset the accumulator
            line = prefix + line;
            ret = cli.processLine(line);
            prefix = "";
            curPrompt = prompt;
        } else {
            // statement continues on the next line; switch to the continuation prompt
            prefix = prefix + line;
            curPrompt = prompt2;
            continue;
        }
    }

    System.exit(ret);
}
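// ---------------------------------------------------------------------------
// The Hive loop above decides statement boundaries by a trailing ';' that is
// not an escaped "\;", switching to a continuation prompt otherwise. That
// test in isolation, with a couple of worked examples; the class and method
// names are assumptions for illustration.
// ---------------------------------------------------------------------------
public class StatementBoundarySketch {
    // true when a line completes a statement, by the same rule as the loop above
    static boolean endsStatement(String line) {
        String t = line.trim();
        return t.endsWith(";") && !t.endsWith("\\;");
    }

    public static void main(String[] args) {
        System.out.println(endsStatement("select 1;"));      // true: trailing ';'
        System.out.println(endsStatement("select 1"));       // false: statement continues
        System.out.println(endsStatement("add jar x\\;"));   // false: escaped "\;"
    }
}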