Code example #1
File: HiveTestUtil.java  Project: babartareen/phoenix
  private CliSessionState startSessionState() throws IOException {

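    // Run the test session under Hive's stock Hadoop authenticator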
    HiveConf.setVar(
        conf,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
        "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");

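    // Temporarily force the MapReduce execution engine while the session starts; the original value is restored below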
    String execEngine = conf.get("hive.execution.engine");
    conf.set("hive.execution.engine", "mr");
    CliSessionState ss = new CliSessionState(conf);
    assert ss != null;
    ss.in = System.in;
    ss.out = System.out;
    ss.err = System.out;

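    // Tear down any previously active session: close it entirely on Tez, and close its output stream unless it is stdout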
    SessionState oldSs = SessionState.get();
    if (oldSs != null && clusterType == MiniClusterType.tez) {
      oldSs.close();
    }
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
      oldSs.out.close();
    }
    SessionState.start(ss);

    isSessionStateStarted = true;

    conf.set("hive.execution.engine", execEngine);
    return ss;
  }
Code example #2
File: HiveTestUtil.java  Project: babartareen/phoenix
  public String cliInit(String tname, boolean recreate) throws Exception {
    if (recreate) {
      cleanUp();
      createSources();
    }

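    // Reset the authenticator to the Hadoop default and clear the cached work map before building a fresh CLI session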
    HiveConf.setVar(
        conf,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
        "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
    Utilities.clearWorkMap();
    CliSessionState ss = new CliSessionState(conf);
    assert ss != null;
    ss.in = System.in;

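    // Route the query output to a per-test file under logDir, named after the test plus its expected output extension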
    String outFileExtension = getOutFileExtension(tname);
    String stdoutName = null;
    if (outDir != null) {
      File qf = new File(outDir, tname);
      stdoutName = qf.getName().concat(outFileExtension);
    } else {
      stdoutName = tname + outFileExtension;
    }

    File outf = new File(logDir, stdoutName);
    OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
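    // Wrap the output stream to match how this query's results are compared: sorted, digested, both, or plain UTF-8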
    if (qSortQuerySet.contains(tname)) {
      ss.out = new SortPrintStream(fo, "UTF-8");
    } else if (qHashQuerySet.contains(tname)) {
      ss.out = new DigestPrintStream(fo, "UTF-8");
    } else if (qSortNHashQuerySet.contains(tname)) {
      ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
    } else {
      ss.out = new PrintStream(fo, true, "UTF-8");
    }
    ss.err = new CachingPrintStream(fo, true, "UTF-8");
    ss.setIsSilent(true);
    SessionState oldSs = SessionState.get();

    if (oldSs != null && clusterType == MiniClusterType.tez) {
      oldSs.close();
    }

    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
      oldSs.out.close();
    }
    SessionState.start(ss);

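    // Start the CLI driver, run any configured init files, and return the path of the captured output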
    cliDriver = new CliDriver();
    cliDriver.processInitFiles(ss);

    return outf.getAbsolutePath();
  }
Code example #3
  @Override
  protected void runReportal() throws Exception {
    System.out.println("Reportal Hive: Setting up Hive");
    HiveConf conf = new HiveConf(SessionState.class);

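    // Forward Hadoop delegation tokens to the job when a token file is present (secure clusters)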
    if (System.getenv("HADOOP_TOKEN_FILE_LOCATION") != null) {
      conf.set("mapreduce.job.credentials.binary", System.getenv("HADOOP_TOKEN_FILE_LOCATION"));
    }

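    // Query results are captured in a size-capped temporary TSV file; the session's error stream goes to stdout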
    File tempTSVFile = new File("./temp.tsv");
    OutputStream tsvTempOutputStream =
        new BoundedOutputStream(
            new BufferedOutputStream(new FileOutputStream(tempTSVFile)), outputCapacity);
    PrintStream logOut = System.out;

    // NOTE: It is critical to do this here so that log4j is reinitialized
    // before any of the other core hive classes are loaded
    // [email protected]: I disabled this because it appears to swallow
    // all future logging (even outside of hive).
    // SessionState.initHiveLog4j();

    String orig = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

    CliSessionState sessionState = new CliSessionState(conf);
    sessionState.in = System.in;
    sessionState.out = new PrintStream(tsvTempOutputStream, true, "UTF-8");
    sessionState.err = new PrintStream(logOut, true, "UTF-8");

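    // Parse the generated Hive arguments in two stages with OptionsProcessor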
    OptionsProcessor oproc = new OptionsProcessor();

    // Feed in Hive Args
    String[] args = buildHiveArgs();
    if (!oproc.process_stage1(args)) {
      throw new Exception("unable to parse options stage 1");
    }

    if (!oproc.process_stage2(sessionState)) {
      throw new Exception("unable to parse options stage 2");
    }

    // Set all properties specified via command line
    for (Map.Entry<Object, Object> item : sessionState.cmdProperties.entrySet()) {
      conf.set((String) item.getKey(), (String) item.getValue());
    }

    SessionState.start(sessionState);

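    // Expand the configured aux-jars path and write it back only if the expansion actually changed it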
    String expanded = expandHiveAuxJarsPath(orig);
    if (orig == null || orig.equals(expanded)) {
      System.out.println("Hive aux jars variable not expanded");
    } else {
      System.out.println("Expanded aux jars variable from [" + orig + "] to [" + expanded + "]");
      HiveConf.setVar(conf, HiveConf.ConfVars.HIVEAUXJARS, expanded);
    }

    if (!ShimLoader.getHadoopShims().usesJobShell()) {
      // hadoop-20 and above - we need to augment classpath using hiveconf
      // components
      // see also: code in ExecDriver.java
      ClassLoader loader = conf.getClassLoader();
      String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);

      System.out.println("Got auxJars = " + auxJars);

      if (StringUtils.isNotBlank(auxJars)) {
        loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
      }
      conf.setClassLoader(loader);
      Thread.currentThread().setContextClassLoader(loader);
    }

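    // Enable header printing, then run the job's queries one statement at a time, buffering lines until a terminating ';'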
    CliDriver cli = new CliDriver();
    int returnValue = 0;
    String prefix = "";

    returnValue = cli.processLine("set hive.cli.print.header=true;");
    String[] queries = jobQuery.split("\n");
    for (String line : queries) {
      if (!prefix.isEmpty()) {
        prefix += '\n';
      }
      if (line.trim().endsWith(";") && !line.trim().endsWith("\\;")) {
        line = prefix + line;
        line = injectVariables(line);
        System.out.println("Reportal Hive: Running Hive Query: " + line);
        System.out.println(
            "Reportal Hive: HiveConf HIVEAUXJARS: "
                + HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS));
        returnValue = cli.processLine(line);
        prefix = "";
      } else {
        prefix = prefix + line;
        continue;
      }
    }

    tsvTempOutputStream.close();

    // Convert the TSV output to CSV and write it to disk
    System.out.println("Reportal Hive: Converting output");
    InputStream tsvTempInputStream = new BufferedInputStream(new FileInputStream(tempTSVFile));
    Scanner rowScanner = new Scanner(tsvTempInputStream);
    PrintStream csvOutputStream = new PrintStream(outputStream);
    while (rowScanner.hasNextLine()) {
      String tsvLine = rowScanner.nextLine();
      // strip all quotes, and then quote the columns
      csvOutputStream.println("\"" + tsvLine.replace("\"", "").replace("\t", "\",\"") + "\"");
    }
    rowScanner.close();
    csvOutputStream.close();

    // Remove the temporary TSV file
    tempTSVFile.delete();

    if (returnValue != 0) {
      throw new Exception("Hive query finished with a non zero return code");
    }

    System.out.println("Reportal Hive: Ended successfully");
  }