Example 1
    /**
     * Separate from constructor, because initialize() may need to be called in a separate thread.
     */
    synchronized void initialize() {
      assertState(QueryState.CREATED);
      this.hiveConf = new HiveConf(Driver.class);

      // Update configuration with user/group info.
      if (query.hadoop_user == null) {
        throw new RuntimeException("User must be specified.");
      }

      // Update scratch dir (to have one per user)
      File scratchDir = new File("/tmp/hive-beeswax-" + query.hadoop_user);
      hiveConf.set(HiveConf.ConfVars.SCRATCHDIR.varname, scratchDir.getPath());
      // Create the temporary directory if necessary.
      // If mapred.job.tracker is set to local, this is used by MapRedTask.
      if (!scratchDir.isDirectory()) {
        if (scratchDir.exists() || !scratchDir.mkdirs()) {
        LOG.warn("Could not create tmp dir: " + scratchDir);
        }
      }

      driver = new Driver(hiveConf);
      ClassLoader loader = hiveConf.getClassLoader();
      String auxJars = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVEAUXJARS);
      if (StringUtils.isNotBlank(auxJars)) {
        try {
          loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        } catch (Exception e) {
          LOG.error("Failed to add jars to class loader: " + auxJars, e);
        }
      }
      hiveConf.setClassLoader(loader);
      Thread.currentThread().setContextClassLoader(loader);
      SessionState.start(hiveConf); // this is thread-local
      this.sessionState = SessionState.get();

      // If this work has a LogContext, associate the children output to the logContext
      OutputStream lcOutStream = null;
      if (this.logContext != null) lcOutStream = this.logContext.getOutputStream();

      // A copy of everything goes to the LogContext.
      // In addition, stderr goes to errStream for error reporting.
      // Note that child output is explicitly teed to System.{out,err},
      // otherwise it'll be swallowed by outStream.
      this.sessionState.out = new PrintStream(new TeeOutputStream(lcOutStream, this.outStream));
      this.sessionState.err = new PrintStream(new TeeOutputStream(lcOutStream, this.errStream));
      this.sessionState.childOut =
          new PrintStream(new TeeOutputStream(System.out, sessionState.out));
      this.sessionState.childErr =
          new PrintStream(new TeeOutputStream(System.err, sessionState.err));

      this.state = QueryState.INITIALIZED;
    }
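
The stream wiring at the end of initialize() tees the session's output so that a copy reaches the LogContext while child output stays visible on the console. A minimal standalone sketch of the same tee pattern, using Commons IO's TeeOutputStream (an assumption -- the Beeswax code may ship its own null-tolerant variant) and a hypothetical query.log destination:

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import org.apache.commons.io.output.TeeOutputStream;

public class TeeDemo {
  public static void main(String[] args) throws IOException {
    try (FileOutputStream log = new FileOutputStream("query.log")) {
      // Everything printed through "out" lands on the console and in query.log,
      // mirroring how sessionState.childOut tees child output to System.out.
      PrintStream out = new PrintStream(new TeeOutputStream(System.out, log), true, "UTF-8");
      out.println("this line goes to stdout and to query.log");
      out.flush();
    }
  }
}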
Example 2
  // Reloads the jars under the path specified by the hive.reloadable.aux.jars.path property
  public void reloadAuxJars() throws IOException {
    final Set<String> reloadedAuxJars = new HashSet<String>();

    final String renewableJarPath = conf.getVar(ConfVars.HIVERELOADABLEJARS);
    // do nothing if this property is not specified or empty
    if (renewableJarPath == null || renewableJarPath.isEmpty()) {
      return;
    }

    Set<String> jarPaths = Utilities.getJarFilesByPath(renewableJarPath);

    // load jars under the hive.reloadable.aux.jars.path
    if (!jarPaths.isEmpty()) {
      reloadedAuxJars.addAll(jarPaths);
    }

    // remove the previous renewable jars
    try {
      if (preReloadableAuxJars != null && !preReloadableAuxJars.isEmpty()) {
        Utilities.removeFromClassPath(preReloadableAuxJars.toArray(new String[0]));
      }
    } catch (Exception e) {
      String msg = "Fail to remove the reloaded jars loaded last time: " + e;
      throw new IOException(msg, e);
    }

    try {
      if (reloadedAuxJars != null && !reloadedAuxJars.isEmpty()) {
        URLClassLoader currentCLoader =
            (URLClassLoader) SessionState.get().getConf().getClassLoader();
        currentCLoader =
            (URLClassLoader)
                Utilities.addToClassPath(currentCLoader, reloadedAuxJars.toArray(new String[0]));
        conf.setClassLoader(currentCLoader);
        Thread.currentThread().setContextClassLoader(currentCLoader);
      }
      preReloadableAuxJars.clear();
      preReloadableAuxJars.addAll(reloadedAuxJars);
    } catch (Exception e) {
      String msg =
          "Failed to add jars from the path specified by the hive.reloadable.aux.jars.path property: "
              + e;
      throw new IOException(msg, e);
    }
  }
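
A hedged sketch of how reloadAuxJars() is typically driven: point hive.reloadable.aux.jars.path at a directory, drop new jars into it at runtime, then trigger a reload on the current session. The identifiers follow stock Hive's SessionState/HiveConf API, and the directory path is a made-up example:

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState;

public class ReloadAuxJarsDemo {
  public static void main(String[] args) throws Exception {
    HiveConf conf = new HiveConf(SessionState.class);
    // Point the reloadable path at a directory the operator can update at runtime.
    conf.setVar(HiveConf.ConfVars.HIVERELOADABLEJARS, "/tmp/hive-reloadable-jars");

    SessionState ss = SessionState.start(new SessionState(conf));
    // After new jars are copied into the directory, a reload picks them up and
    // removes whatever was loaded from that path on the previous call.
    ss.reloadAuxJars();
  }
}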
Example 3
  public static void main(String[] args) throws Exception {
    OptionsProcessor oproc = new OptionsProcessor();
    if (!oproc.process_stage1(args)) {
      System.exit(1);
    }

    SessionState.initHiveLog4j();

    CliSessionState ss = new CliSessionState(new HiveConf(SessionState.class));
    ss.in = System.in;
    try {
      ss.out = new PrintStream(System.out, true, "UTF-8");
      ss.err = new PrintStream(System.err, true, "UTF-8");
    } catch (UnsupportedEncodingException e) {
      System.exit(3);
    }

    if (!oproc.process_stage2(ss)) {
      System.exit(2);
    }

    HiveConf conf = ss.getConf();
    for (Map.Entry<Object, Object> item : ss.cmdProperties.entrySet()) {
      conf.set((String) item.getKey(), (String) item.getValue());
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
      ClassLoader loader = conf.getClassLoader();
      String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
      if (StringUtils.isNotBlank(auxJars)) {
        loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
      }
      conf.setClassLoader(loader);
      Thread.currentThread().setContextClassLoader(loader);
    }

    SessionState.start(ss);

    CliDriver cli = new CliDriver();

    Hive hive = Hive.get();

    String username = ss.getUserName();
    String passwd = ss.passwd;
    if (!hive.isAUser(username, passwd)) {
      System.out.println("User or passwd is wrong!");
      System.exit(0);
    } else {
      System.out.println("Connect to TDW successfully!");
    }

    if (ss.getDbName() == null) {
      ss.setDbName(MetaStoreUtils.DEFAULT_DATABASE_NAME);
    }
    if (ss.execString != null) {
      System.exit(cli.processLine(ss.execString));
    }

    try {
      if (ss.fileName != null) {
        System.exit(cli.processReader(new BufferedReader(new FileReader(ss.fileName))));
      }
    } catch (FileNotFoundException e) {
      System.err.println("Could not open input file for reading. (" + e.getMessage() + ")");
      System.exit(3);
    }

    ConsoleReader reader = new ConsoleReader();
    reader.setBellEnabled(false);

    List<SimpleCompletor> completors = new LinkedList<SimpleCompletor>();
    completors.add(
        new SimpleCompletor(
            new String[] {"set", "from", "create", "load", "describe", "quit", "exit"}));
    reader.addCompletor(new ArgumentCompletor(completors));

    String line;
    final String HISTORYFILE = ".hivehistory";
    String historyFile = System.getProperty("user.home") + File.separator + HISTORYFILE;
    reader.setHistory(new History(new File(historyFile)));
    int ret = 0;

    String prefix = "";
    String curPrompt = prompt;
    while ((line = reader.readLine(curPrompt + "> ")) != null) {
      if (!prefix.equals("")) {
        prefix += '\n';
      }
      if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
        line = prefix + line;
        ret = cli.processLine(line);
        prefix = "";
        curPrompt = prompt;
      } else {
        prefix = prefix + line;
        curPrompt = prompt2;
        continue;
      }
    }

    System.exit(ret);
  }
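
The read loop above buffers lines in prefix until a statement ends with an unescaped semicolon, and only then hands the accumulated statement to processLine. A small standalone sketch of that accumulation rule, with hypothetical sample input and no Hive dependencies:

import java.util.ArrayList;
import java.util.List;

public class StatementAccumulator {
  // Splits raw input lines into complete statements, treating an unescaped
  // trailing ";" as the terminator -- the same rule the CLI loop applies.
  static List<String> accumulate(String[] lines) {
    List<String> statements = new ArrayList<>();
    String prefix = "";
    for (String line : lines) {
      if (!prefix.isEmpty()) {
        prefix += '\n';
      }
      if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
        statements.add(prefix + line);
        prefix = "";
      } else {
        prefix = prefix + line;
      }
    }
    return statements;
  }

  public static void main(String[] args) {
    String[] input = {"SELECT *", "FROM t", "WHERE x = 1;", "SHOW TABLES;"};
    accumulate(input).forEach(System.out::println);
  }
}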
  @Override
  protected void runReportal() throws Exception {
    System.out.println("Reportal Hive: Setting up Hive");
    HiveConf conf = new HiveConf(SessionState.class);

    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
      conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }

    File tempTSVFile = new File("./temp.tsv");
    OutputStream tsvTempOutputStream =
        new BoundedOutputStream(
            new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
    PrintStream logOut = System.out;

    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    // [email protected]: I disabled this because it appears to swallow
    // all future logging (even outside of hive).
    // SessionState.initHiveLog4j();

    String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

    CliSessionState sessionState = new CliSessionState(conf);
    sessionState.in = System.in;
    sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
    sessionState.err = new PrintStream(logOut, true, "UTF-8");

    OptionsProcessor oproc = new OptionsProcessor();

    // Feed in Hive Args
    String[] args = buildHiveArgs();
    if (!oproc.process_stage1(args)) {
      throw new Exception("unable to parse options stage 1");
    }

    if (!oproc.process_stage2(sessionState)) {
      throw new Exception("unable to parse options stage 2");
    }

    // Set all properties specified via command line
    for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
      conf.set((String) item.getKey(), (String) item.getValue());
    }

    SessionState.start(sessionState);

    String expanded = expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
      System.out.println("Hive aux jars variable not expanded");
    } else {
      System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
      HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
      // hadoop-20 and above - we need to augment classpath using hiveconf
      // components
      // see also: code in ExecDriver.java
      ClassLoader loader = conf.getClassLoader();
      String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

      System.out.println("Got auxJars = " + auxJars);

      if (StringUtils.isNotBlank(auxJars)) {
        loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
      }
      conf.setClassLoader(loader);
      Thread.currentThread().setContextClassLoader(loader);
    }

    CliDriver cli = new CliDriver();
    int returnValue = 0;
    String prefix = "";

    returnValue = cli.processLine("set hive.cli.print.header=true;");
    String[] queries = jobQuery.split("\n");
    for (String line : queries) {
      if (!prefix.isEmpty()) {
        prefix += '\n';
      }
      if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
        line = prefix + line;
        line = injectVariables(line);
        System.out.println("Reportal Hive: Running Hive Query: " + line);
        System.out.println(
            "Reportal Hive: HiveConf HIVEAUXJARS: "
                + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
        returnValue = cli.processLine(line);
        prefix = "";
      } else {
        prefix = prefix + line;
        continue;
      }
    }

    tsvTempOutputStream.close();

    // Convert TSV to CSV and write it to disk
    System.out.println("Reportal Hive: Converting output");
    InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
    Scanner rowScanner = new Scanner(tsvTempInputStream);
    PrintStream csvOutputStream = new PrintStream(outputStream);
    while (rowScanner.hasNextLine()) {
      String tsvLine = rowScanner.nextLine();
      // strip all quotes, and then quote the columns
      csvOutputStream.println("\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
    }
    rowScanner.close();
    csvOutputStream.close();

    // Delete the temp file now that its contents have been converted
    tempTSVFile.delete();

    if (returnValue != 0) {
      throw new Exception("Hive query finished with a non zero return code");
    }

    System.out.println("Reportal Hive: Ended successfully");
  }
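
The conversion step near the end strips embedded double quotes and then wraps each tab-separated column in quotes. A minimal sketch of that per-line transformation in isolation, with a made-up input row:

public class TsvToCsvDemo {
  // Mirrors the per-line conversion in runReportal: remove any existing quotes,
  // then turn tab separators into "," and quote the whole row.
  static String tsvLineToCsv(String tsvLine) {
    return "\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"";
  }

  public static void main(String[] args) {
    String tsv = "alice\t42\tsaid \"hi\"";
    // Prints: "alice","42","said hi"
    System.out.println(tsvLineToCsv(tsv));
  }
}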