static String runHive(String... args) throws Exception {
  ByteArrayOutputStream outBytes = new ByteArrayOutputStream();
  ByteArrayOutputStream errBytes = new ByteArrayOutputStream();
  PrintStream outSaved = System.out;
  PrintStream errSaved = System.err;
  // Redirect stdout/stderr so the CLI's output can be captured and returned.
  System.setOut(new PrintStream(outBytes, true));
  System.setErr(new PrintStream(errBytes, true));
  try {
    CliDriver.run(args);
  } finally {
    System.setOut(outSaved);
    System.setErr(errSaved);
  }
  BufferedReader is = new BufferedReader(
      new InputStreamReader(new ByteArrayInputStream(outBytes.toByteArray())));
  BufferedReader es = new BufferedReader(
      new InputStreamReader(new ByteArrayInputStream(errBytes.toByteArray())));
  StringBuilder output = new StringBuilder();
  String line;
  while ((line = is.readLine()) != null) {
    if (output.length() > 0) {
      output.append("\n");
    }
    output.append(line);
  }
  // Fall back to stderr when the command produced no stdout.
  if (output.length() == 0) {
    while ((line = es.readLine()) != null) {
      if (output.length() > 0) {
        output.append("\n");
      }
      output.append(line);
    }
  }
  return output.toString();
}
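// Usage sketch (hypothetical test, not from the original source): runHive
// hands back everything the CLI printed to stdout (or stderr, when stdout
// is empty) as one string, so a test can assert against console output.
// The query below is illustrative only.
static void exampleRunHive() throws Exception {
  String output = runHive("-e", "SHOW TABLES;");
  System.out.println("Captured CLI output:\n" + output);
}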
public String cliInit(String tname, boolean recreate) throws Exception {
  if (recreate) {
    cleanUp();
    createSources();
  }
  HiveConf.setVar(conf, HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
      "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
  Utilities.clearWorkMap();
  CliSessionState ss = new CliSessionState(conf);
  ss.in = System.in;

  // Route the session's output to a per-test log file.
  String outFileExtension = getOutFileExtension(tname);
  String stdoutName;
  if (outDir != null) {
    File qf = new File(outDir, tname);
    stdoutName = qf.getName().concat(outFileExtension);
  } else {
    stdoutName = tname + outFileExtension;
  }
  File outf = new File(logDir, stdoutName);
  OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
  // Some tests need deterministic output: sorted, digested, or both.
  if (qSortQuerySet.contains(tname)) {
    ss.out = new SortPrintStream(fo, "UTF-8");
  } else if (qHashQuerySet.contains(tname)) {
    ss.out = new DigestPrintStream(fo, "UTF-8");
  } else if (qSortNHashQuerySet.contains(tname)) {
    ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
  } else {
    ss.out = new PrintStream(fo, true, "UTF-8");
  }
  ss.err = new CachingPrintStream(fo, true, "UTF-8");
  ss.setIsSilent(true);

  // Tear down any previous session before starting the new one.
  SessionState oldSs = SessionState.get();
  if (oldSs != null && clusterType == MiniClusterType.tez) {
    oldSs.close();
  }
  if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
    oldSs.out.close();
  }
  SessionState.start(ss);

  cliDriver = new CliDriver();
  cliDriver.processInitFiles(ss);
  return outf.getAbsolutePath();
}
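// Usage sketch (hypothetical harness instance): a typical q-file test calls
// cliInit to get a fresh session whose output lands in a per-test file, then
// runs the client commands and diffs the file against a golden copy. The qt
// instance, test name, and comparison step are illustrative, not from the
// original source.
//
//   String outPath = qt.cliInit("join1.q", true);
//   int rc = qt.executeClient("join1.q");
//   // rc == 0 means every statement succeeded; the captured output at
//   // outPath would then be compared with the expected .q.out file.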
public void createSources() throws Exception {
  if (!isSessionStateStarted) {
    startSessionState();
  }
  // Mark the init phase so init-only settings can key off it.
  conf.setBoolean("hive.test.init.phase", true);
  if (cliDriver == null) {
    cliDriver = new CliDriver();
  }
  cliDriver.processLine("set test.data.dir=" + testFiles + ";");
  conf.setBoolean("hive.test.init.phase", false);
}
public int executeOne(String tname) {
  String q = qMap.get(tname);
  // Nothing (or no complete statement) left to execute for this test.
  if (q == null || q.indexOf(';') == -1) {
    return -1;
  }
  int semi = q.indexOf(';');
  String q1 = q.substring(0, semi + 1);
  String qrest = q.substring(semi + 1);
  // Put the unconsumed remainder back so the next call picks it up.
  qMap.put(tname, qrest);
  LOG.info("Executing " + q1);
  return cliDriver.processLine(q1);
}
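// Usage sketch (hypothetical): executeOne consumes exactly one
// semicolon-terminated statement per call, so a driver loop can step through
// a test's statements until none remain. The test name is illustrative, and
// note that the -1 "no statements left" sentinel is not distinguished here
// from any negative code processLine itself might return.
static void exampleExecuteOne(QTestUtil qt) {
  int rc;
  while ((rc = qt.executeOne("join1.q")) != -1) {
    if (rc != 0) {
      throw new RuntimeException("statement failed with return code " + rc);
    }
  }
}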
public void cleanUp() throws Exception {
  if (!isSessionStateStarted) {
    startSessionState();
  }
  if (System.getenv(QTEST_LEAVE_FILES) != null) {
    return;
  }
  clearTablesCreatedDuringTests();
  SessionState.get().getConf().setBoolean("hive.test.shutdown.phase", true);
  if (!cleanupScript.isEmpty()) {
    String cleanupCommands = readEntireFileIntoString(new File(cleanupScript));
    LOG.info("Cleanup (" + cleanupScript + "):\n" + cleanupCommands);
    if (cliDriver == null) {
      cliDriver = new CliDriver();
    }
    cliDriver.processLine(cleanupCommands);
  }
  SessionState.get().getConf().setBoolean("hive.test.shutdown.phase", false);

  // Delete any contents in the warehouse dir.
  Path p = new Path(testWarehouse);
  FileSystem fs = p.getFileSystem(conf);
  try {
    FileStatus[] ls = fs.listStatus(p);
    for (int i = 0; ls != null && i < ls.length; i++) {
      fs.delete(ls[i].getPath(), true);
    }
  } catch (FileNotFoundException e) {
    // Best effort: the warehouse dir may not exist yet.
  }
  FunctionRegistry.unregisterTemporaryUDF("test_udaf");
  FunctionRegistry.unregisterTemporaryUDF("test_error");
}
public int executeClient(String tname) {
  return cliDriver.processLine(getCommands(tname), false);
}
public int executeClient(String tname1, String tname2) {
  String commands = getCommands(tname1) + CRLF + getCommands(tname2);
  return cliDriver.processLine(commands);
}
public void init(String tname) throws Exception {
  cleanUp();
  createSources();
  cliDriver.processCmd("set hive.cli.print.header=true;");
}
public static void main(String[] args) throws Exception {
  OptionsProcessor oproc = new OptionsProcessor();
  if (!oproc.process_stage1(args)) {
    System.exit(1);
  }

  // log4j must be initialized before other core Hive classes load.
  SessionState.initHiveLog4j();

  CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
  ss.in = System.in;
  try {
    ss.out = new PrintStream(System.out, true, "UTF-8");
    ss.err = new PrintStream(System.err, true, "UTF-8");
  } catch (UnsupportedEncodingException e) {
    System.exit(3);
  }

  if (!oproc.process_stage2(ss)) {
    System.exit(2);
  }

  // Apply properties specified on the command line.
  HiveConf conf = ss.getConf();
  for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
    conf.set((String) item.getKey(), (String) item.getValue());
  }

  if (!ShimLoader.getHadoopShims().usesJobShell()) {
    // hadoop-20 and above: augment the classpath with the aux jars.
    ClassLoader loader = conf.getClassLoader();
    String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
    if (StringUtils.isNotBlank(auxJars)) {
      loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
    }
    conf.setClassLoader(loader);
    Thread.currentThread().setContextClassLoader(loader);
  }

  SessionState.start(ss);

  CliDriver cli = new CliDriver();

  Hive hive = Hive.get();
  String username = ss.getUserName();
  String passwd = ss.passwd;
  if (!hive.isAUser(username, passwd)) {
    System.out.println("User or password is wrong!");
    System.exit(1); // exit non-zero on authentication failure
  } else {
    System.out.println("Connected to TDW successfully!");
  }

  if (ss.getDbName() == null) {
    ss.setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME);
  }
  if (ss.execString != null) {
    System.exit(cli.processLine(ss.execString));
  }

  try {
    if (ss.fileName != null) {
      System.exit(cli.processReader(new BufferedReader(new FileReader(ss.fileName))));
    }
  } catch (FileNotFoundException e) {
    System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
    System.exit(3);
  }

  // Interactive mode: set up line editing, tab completion, and history.
  ConsoleReader reader = new ConsoleReader();
  reader.setBellEnabled(false);
  List<SimpleCompletor> completors = new LinkedList<SimpleCompletor>();
  completors.add(new SimpleCompletor(
      new String[] {"set", "from", "create", "load", "describe", "quit", "exit"}));
  reader.addCompletor(new ArgumentCompletor(completors));

  String line;
  final String HISTORYFILE = ".hivehistory";
  String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
  reader.setHistory(new History(new File(historyFile)));

  int ret = 0;
  String prefix = "";
  String curPrompt = prompt;
  while ((line = reader.readLine(curPrompt + "> ")) != null) {
    if (!prefix.equals("")) {
      prefix += '\n';
    }
    // A statement is complete once a line ends with an unescaped ';'.
    if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
      line = prefix + line;
      ret = cli.processLine(line);
      prefix = "";
      curPrompt = prompt;
    } else {
      // Accumulate and switch to the continuation prompt.
      prefix = prefix + line;
      curPrompt = prompt2;
    }
  }

  System.exit(ret);
}
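// Illustrative helper (not in the original source): the read loop above
// treats a line as terminating a statement only when it ends with a ';'
// that is not escaped as "\;". Extracted as a predicate for clarity:
static boolean endsStatement(String line) {
  String t = line.trim();
  return t.endsWith(";") && !t.endsWith("\\;");
}
// endsStatement("select * from t;") -> true  (complete statement)
// endsStatement("select * from t")  -> false (continuation line)
// endsStatement("... \\;")          -> false (escaped terminator)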
@Override
protected void runReportal() throws Exception {
  System.out.println("Reportal Hive: Setting up Hive");
  HiveConf conf = new HiveConf(SessionState.class);
  if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
    conf.set("mapreduce.job.credentials.binary",
        System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
  }

  File tempTSVFile = new File("./temp.tsv");
  OutputStream tsvTempOutputStream = new BoundedOutputStream(
      new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
  PrintStream logOut = System.out;

  // NOTE: It is critical to do this here so that log4j is reinitialized
  // before any of the other core hive classes are loaded
  // [email protected]: I disabled this because it appears to swallow
  // all future logging (even outside of hive).
  // SessionState.initHiveLog4j();

  String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

  CliSessionState sessionState = new CliSessionState(conf);
  sessionState.in = System.in;
  sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
  sessionState.err = new PrintStream(logOut, true, "UTF-8");

  // Feed in the Hive args.
  OptionsProcessor oproc = new OptionsProcessor();
  String[] args = buildHiveArgs();
  if (!oproc.process_stage1(args)) {
    throw new Exception("unable to parse options stage 1");
  }
  if (!oproc.process_stage2(sessionState)) {
    throw new Exception("unable to parse options stage 2");
  }

  // Set all properties specified via command line.
  for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
    conf.set((String) item.getKey(), (String) item.getValue());
  }

  SessionState.start(sessionState);

  String expanded = expandHiveAuxJarsPath(orig);
  if (orig == null || orig.equals(expanded)) {
    System.out.println("Hive aux jars variable not expanded");
  } else {
    System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
    HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
  }

  if (!ShimLoader.getHadoopShims().usesJobShell()) {
    // hadoop-20 and above - we need to augment classpath using hiveconf
    // components; see also: code in ExecDriver.java
    ClassLoader loader = conf.getClassLoader();
    String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
    System.out.println("Got auxJars = " + auxJars);
    if (StringUtils.isNotBlank(auxJars)) {
      loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
    }
    conf.setClassLoader(loader);
    Thread.currentThread().setContextClassLoader(loader);
  }

  CliDriver cli = new CliDriver();
  int returnValue = cli.processLine("set hive.cli.print.header=true;");

  // Run each semicolon-terminated statement, accumulating multi-line
  // statements in prefix until an unescaped ';' ends them.
  String prefix = "";
  String[] queries = jobQuery.split("\n");
  for (String line : queries) {
    if (!prefix.isEmpty()) {
      prefix += '\n';
    }
    if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
      line = prefix + line;
      line = injectVariables(line);
      System.out.println("Reportal Hive: Running Hive Query: " + line);
      System.out.println("Reportal Hive: HiveConf HIVEAUXJARS: "
          + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
      returnValue = cli.processLine(line);
      prefix = "";
    } else {
      prefix = prefix + line;
    }
  }
  tsvTempOutputStream.close();

  // Convert the TSV to CSV and write it to disk.
  System.out.println("Reportal Hive: Converting output");
  InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
  Scanner rowScanner = new Scanner(tsvTempInputStream);
  PrintStream csvOutputStream = new PrintStream(outputStream);
  while (rowScanner.hasNextLine()) {
    String tsvLine = rowScanner.nextLine();
    // Strip all quotes, and then quote the columns.
    csvOutputStream.println(
        "\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
  }
  rowScanner.close();
  csvOutputStream.close();

  // Flush the temp file out.
  tempTSVFile.delete();

  if (returnValue != 0) {
    throw new Exception("Hive query finished with a non-zero return code");
  }

  System.out.println("Reportal Hive: Ended successfully");
}
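// Illustrative only: the conversion loop above strips embedded double quotes
// and wraps each tab-separated column in quotes. For a row like
//   foo<TAB>says "hi"<TAB>bar
// the emitted CSV line is:
//   "foo","says hi","bar"
// A minimal sketch of the same transform as a standalone helper:
static String tsvRowToCsv(String tsvLine) {
  return "\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"";
}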