/**
   * Reads the temporary results for non-Hive (non-Driver) commands into the returned List of
   * strings.
   *
   * @param nLines number of lines to read at once; if nLines <= 0, read all lines.
   */
  private List<String> readResults(int nLines) throws HiveSQLException {
    if (resultReader == null) {
      SessionState sessionState = getParentSession().getSessionState();
      File tmp = sessionState.getTmpOutputFile();
      try {
        resultReader = new BufferedReader(new FileReader(tmp));
      } catch (FileNotFoundException e) {
        LOG.error("File " + tmp + " not found. ", e);
        throw new HiveSQLException(e);
      }
    }
    List<String> results = new ArrayList<String>();

    for (int i = 0; i < nLines || nLines <= 0; ++i) {
      try {
        String line = resultReader.readLine();
        if (line == null) {
          // reached the end of the result file
          break;
        } else {
          results.add(line);
        }
      } catch (IOException e) {
        LOG.error("Reading temp results encountered an exception: ", e);
        throw new HiveSQLException(e);
      }
    }
    return results;
  }
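
A minimal caller sketch for the paging contract above (hypothetical; the batch size and process() are assumptions, not part of the original snippet): an empty returned list signals that the temp file is exhausted.

  // Hypothetical usage: drain the temp results 100 lines at a time.
  List<String> batch;
  while (!(batch = readResults(100)).isEmpty()) {
    for (String line : batch) {
      process(line); // placeholder for real row handling
    }
  }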
Example #2
  public static CommandProcessor get(String cmd, HiveConf conf) {
    String cmdl = cmd.toLowerCase();

    if ("set".equals(cmdl)) {
      return new SetProcessor();
    } else if ("dfs".equals(cmdl)) {
      SessionState ss = SessionState.get();
      return new DfsProcessor(ss.getConf());
    } else if ("add".equals(cmdl)) {
      return new AddResourceProcessor();
    } else if ("delete".equals(cmdl)) {
      return new DeleteResourceProcessor();
    } else if (!isBlank(cmd)) {
      if (conf == null) {
        return new Driver();
      }

      Driver drv = mapDrivers.get(conf);
      if (drv == null) {
        drv = new Driver();
        mapDrivers.put(conf, drv);
      }
      drv.init();
      return drv;
    }

    return null;
  }
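
A dispatch sketch matching how this factory is used elsewhere on this page (see the execute() example further down); the command text is an assumption, and note the factory keys off the first token only.

  // Hypothetical dispatch: split off the first token, let the factory choose
  // the processor, and hand non-Driver processors the rest of the command.
  String cmd = "set hive.execution.engine=mr";
  String[] tokens = cmd.trim().split("\\s");
  CommandProcessor proc = CommandProcessorFactory.get(tokens[0], conf);
  if (proc != null && !(proc instanceof Driver)) {
    proc.run(cmd.trim().substring(tokens[0].length()).trim());
  }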
Example #3
  // should be called after session registry is checked
  private FunctionInfo registerToSessionRegistry(String qualifiedName, FunctionInfo function) {
    FunctionInfo ret = null;
    ClassLoader prev = Utilities.getSessionSpecifiedClassLoader();
    try {
      // Found UDF in metastore - now add it to the function registry.
      // At this point we should add any relevant jars that would be needed for the UDF.
      FunctionResource[] resources = function.getResources();
      try {
        FunctionTask.addFunctionResources(resources);
      } catch (Exception e) {
        LOG.error("Unable to load resources for " + qualifiedName + ":" + e, e);
        return null;
      }
      ClassLoader loader = Utilities.getSessionSpecifiedClassLoader();
      Class<?> udfClass = Class.forName(function.getClassName(), true, loader);

      ret = FunctionRegistry.registerTemporaryUDF(qualifiedName, udfClass, resources);
      if (ret == null) {
        LOG.error(function.getClassName() + " is not a valid UDF class and was not registered.");
      }
      if (SessionState.get().isHiveServerQuery()) {
        SessionState.getRegistryForWrite().addToUDFLoaders(loader);
      }
    } catch (ClassNotFoundException e) {
      // Lookup of the UDF class failed
      LOG.error("Unable to load UDF class: " + e);
      Utilities.restoreSessionSpecifiedClassLoader(prev);
    }
    function.shareStateWith(ret);
    return ret;
  }
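
The "should be called after session registry is checked" contract implies a two-level lookup; a sketch of that order (sessionRegistry and lookupInMetastore are hypothetical stand-ins for the real lookup path):

  // Hypothetical lookup order: session registry first, then the metastore
  // path that ends in registerToSessionRegistry above.
  FunctionInfo info = sessionRegistry.getFunctionInfo(name); // placeholder registry
  if (info == null) {
    FunctionInfo fromMetastore = lookupInMetastore(name); // placeholder
    if (fromMetastore != null) {
      info = registerToSessionRegistry(name, fromMetastore);
    }
  }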
Example #4
  private static int executeCmd(String[] args, String outFile, String errFile) throws Exception {
    LOG.info("Running: " + org.apache.commons.lang.StringUtils.join(args, ' '));

    PrintStream out =
        outFile == null
            ? SessionState.getConsole().getChildOutStream()
            : new PrintStream(new FileOutputStream(outFile), true);
    PrintStream err =
        errFile == null
            ? SessionState.getConsole().getChildErrStream()
            : new PrintStream(new FileOutputStream(errFile), true);

    Process executor = Runtime.getRuntime().exec(args);

    StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, err);
    StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, out);

    outPrinter.start();
    errPrinter.start();

    int result = executor.waitFor();

    outPrinter.join();
    errPrinter.join();

    if (outFile != null) {
      out.close();
    }

    if (errFile != null) {
      err.close();
    }

    return result;
  }
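
A call sketch (paths and argv are assumptions): stdout and stderr are teed to files only when non-null file names are passed; otherwise they go to the session's child streams.

  // Hypothetical invocation: capture both streams of an external command.
  int rc = executeCmd(new String[] {"ls", "-l", "/tmp"}, "/tmp/cmd.out", "/tmp/cmd.err");
  if (rc != 0) {
    LOG.error("Command exited with code " + rc);
  }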
Example #5
  private CliSessionState startSessionState() throws IOException {

    HiveConf.setVar(
        conf,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
        "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");

    String execEngine = conf.get("hive.execution.engine");
    conf.set("hive.execution.engine", "mr");
    CliSessionState ss = new CliSessionState(conf);
    assert ss != null;
    ss.in = System.in;
    ss.out = System.out;
    ss.err = System.out;

    SessionState oldSs = SessionState.get();
    if (oldSs != null && clusterType == MiniClusterType.tez) {
      oldSs.close();
    }
    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
      oldSs.out.close();
    }
    SessionState.start(ss);

    isSessionStateStarted = true;

    conf.set("hive.execution.engine", execEngine);
    return ss;
  }
Example #6
  public void generateTestData() throws Exception {

    // remove data from previous runs.
    cleanDir(DB_DIR);
    cleanDir(WH_DIR);

    HiveConf conf = new HiveConf();

    conf.set(
        "javax.jdo.option.ConnectionURL",
        String.format("jdbc:derby:;databaseName=%s;create=true", DB_DIR));
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "file:///");
    conf.set("hive.metastore.warehouse.dir", WH_DIR);

    SessionState ss = new SessionState(new HiveConf(SessionState.class));
    SessionState.start(ss);
    hiveDriver = new Driver(conf);

    // generate (key, value) test data
    String testDataFile = generateTestDataFile();

    createTableAndLoadData("default", "kv", testDataFile);
    executeQuery("CREATE DATABASE IF NOT EXISTS db1");
    createTableAndLoadData("db1", "kv_db1", testDataFile);

    ss.close();
  }
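
executeQuery() is referenced above but not shown; a plausible shape given the hiveDriver field (the response handling is an assumption based on Driver's CommandProcessorResponse API):

  // Hypothetical helper: run one statement through the shared Driver and
  // fail fast on a non-zero response code.
  private void executeQuery(String sql) throws Exception {
    CommandProcessorResponse resp = hiveDriver.run(sql);
    if (resp.getResponseCode() != 0) {
      throw new Exception("Query failed: " + resp.getErrorMessage());
    }
  }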
Example #7
 public static String getCurrentDB() {
   String currentDB = null;
   if (SessionState.get() != null) {
     currentDB = SessionState.get().getCurrentDatabase();
   }
   return currentDB == null ? "default" : currentDB;
 }
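
A one-line use (tableName is a placeholder): qualifying an unqualified table with the session's database, falling back to "default" exactly as above.

 String qualified = getCurrentDB() + "." + tableName; // hypothetical caller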
Example #8
 public void failed(int ecode, String fname, String debugHint) {
   String command = SessionState.get() != null ? SessionState.get().getLastCommand() : null;
   Assert.fail(
       "Client Execution failed with error code = "
           + ecode
           + (command != null ? " running " + command : "")
           + (debugHint != null ? debugHint : ""));
 }
Example #9
 private HiveOperation getCurrentHiveStmtOp() {
   SessionState sessState = SessionState.get();
   if (sessState == null) {
     // TODO: Warn
     return null;
   }
   return sessState.getHiveOperation();
 }
Example #10
  /**
   * Sets the current session to an existing session object. If a thread is running multiple
   * sessions, it must call this method with the new session object when switching from one
   * session to another.
   */
  public static SessionState start(SessionState startSs) {
    setCurrentSessionState(startSs);

    if (startSs.hiveHist == null) {
      if (startSs.getConf().getBoolVar(HiveConf.ConfVars.HIVE_SESSION_HISTORY_ENABLED)) {
        startSs.hiveHist = new HiveHistoryImpl(startSs);
      } else {
        // Hive history is disabled, create a no-op proxy
        startSs.hiveHist = HiveHistoryProxyHandler.getNoOpHiveHistoryProxy();
      }
    }

    // Get the following out of the way when you start the session; these take a
    // while and should be done at startup.
    try {
      // Hive object instance should be created with a copy of the conf object. If the conf is
      // shared with SessionState, other parts of the code might update the config, but
      // Hive.get(HiveConf) would not recognize the case when it needs refreshing
      Hive.get(new HiveConf(startSs.conf)).getMSC();
      UserGroupInformation sessionUGI = Utils.getUGI();
      FileSystem.get(startSs.conf);

      // Create scratch dirs for this session
      startSs.createSessionDirs(sessionUGI.getShortUserName());

      // Set temp file containing results to be sent to HiveClient
      if (startSs.getTmpOutputFile() == null) {
        try {
          startSs.setTmpOutputFile(createTempFile(startSs.getConf()));
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }

    } catch (Exception e) {
      // Catch-all due to some exec-time dependencies on session state
      // that would otherwise cause ClassNotFoundException
      throw new RuntimeException(e);
    }

    if (HiveConf.getVar(startSs.getConf(), HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")
        && !startSs.isHiveServerQuery) {
      try {
        if (startSs.tezSessionState == null) {
          startSs.tezSessionState = new TezSessionState(startSs.getSessionId());
        }
        if (!startSs.tezSessionState.isOpen()) {
          startSs.tezSessionState.open(startSs.conf); // should use conf on session start-up
        }
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    } else {
      LOG.info("No Tez session required at this point. hive.execution.engine=mr.");
    }
    return startSs;
  }
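
A minimal start-up sketch under the defaults above (assumes hive.execution.engine=mr so no Tez session is opened): after start() returns, the scratch dirs and the temp output file are ready.

  HiveConf conf = new HiveConf(SessionState.class);
  SessionState ss = SessionState.start(new SessionState(conf));
  File tmp = ss.getTmpOutputFile(); // created by start() if it was null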
Example #11
 private void applyAuthorizationConfigPolicy(HiveConf newHiveConf)
     throws HiveException, MetaException {
   // authorization setup using SessionState should be revisited eventually, as
   // authorization and authentication are not session specific settings
   SessionState ss = new SessionState(newHiveConf);
   ss.setIsHiveServerQuery(true);
   SessionState.start(ss);
   ss.applyAuthorizationPolicy();
 }
Example #12
  /** Clear out any side effects of running tests */
  public void clearTablesCreatedDuringTests() throws Exception {
    if (System.getenv(QTEST_LEAVE_FILES) != null) {
      return;
    }

    // Delete any tables other than the source tables
    // and any databases other than the default database.
    for (String dbName : db.getAllDatabases()) {
      SessionState.get().setCurrentDatabase(dbName);
      for (String tblName : db.getAllTables()) {
        if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
          Table tblObj = db.getTable(tblName);
          // an index table cannot be dropped directly; dropping the base
          // table automatically drops all of its index tables
          if (tblObj.isIndexTable()) {
            continue;
          }
          db.dropTable(dbName, tblName);
        } else {
          // this table is defined in srcTables, drop all indexes on it
          List<Index> indexes = db.getIndexes(dbName, tblName, (short) -1);
          if (indexes != null && indexes.size() > 0) {
            for (Index index : indexes) {
              db.dropIndex(dbName, tblName, index.getIndexName(), true, true);
            }
          }
        }
      }
      if (!DEFAULT_DATABASE_NAME.equals(dbName)) {
        // Drop cascade, may need to drop functions
        db.dropDatabase(dbName, true, true, true);
      }
    }

    // delete remaining directories for external tables (can affect stats for following tests)
    try {
      Path p = new Path(testWarehouse);
      FileSystem fileSystem = p.getFileSystem(conf);
      if (fileSystem.exists(p)) {
        for (FileStatus status : fileSystem.listStatus(p)) {
          if (status.isDir()) {
            fileSystem.delete(status.getPath(), true);
          }
        }
      }
    } catch (IllegalArgumentException e) {
      // ignore: the test sometimes provides an invalid URL intentionally
    }
    SessionState.get().setCurrentDatabase(DEFAULT_DATABASE_NAME);

    List<String> roleNames = db.getAllRoleNames();
    for (String roleName : roleNames) {
      if (!"PUBLIC".equalsIgnoreCase(roleName) && !"ADMIN".equalsIgnoreCase(roleName)) {
        db.dropRole(roleName);
      }
    }
  }
Example #13
 public static Path getTempTableSpace(Configuration conf) {
   SessionState ss = SessionState.get();
   if (ss == null) {
     String tempTablePathString = conf.get(TMP_TABLE_SPACE_KEY);
     Preconditions.checkNotNull(
         tempTablePathString, "Conf temp table path expected to be non-null");
     return new Path(tempTablePathString);
   }
   return ss.getTempTableSpace();
 }
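
A backend-side sketch of the fallback branch (the path value is an assumption): with no SessionState in the task JVM, the temp table space must ride along in the job Configuration under TMP_TABLE_SPACE_KEY.

 Configuration jobConf = new Configuration();
 jobConf.set(TMP_TABLE_SPACE_KEY, "/tmp/hive/_tmp_space.db"); // hypothetical path
 Path tmpSpace = getTempTableSpace(jobConf); // hits the null-SessionState branch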
Example #14
    /** A constructor. */
    public HiveServerHandler() throws MetaException {
      super(HiveServer.class.getName());

      isHiveQuery = false;
      SessionState session = new SessionState(new HiveConf(SessionState.class));
      SessionState.start(session);
      session.in = null;
      session.out = null;
      session.err = null;
      driver = new Driver();
    }
Example #15
  public String cliInit(String tname, boolean recreate) throws Exception {
    if (recreate) {
      cleanUp();
      createSources();
    }

    HiveConf.setVar(
        conf,
        HiveConf.ConfVars.HIVE_AUTHENTICATOR_MANAGER,
        "org.apache.hadoop.hive.ql.security.HadoopDefaultAuthenticator");
    Utilities.clearWorkMap();
    CliSessionState ss = new CliSessionState(conf);
    assert ss != null;
    ss.in = System.in;

    String outFileExtension = getOutFileExtension(tname);
    String stdoutName = null;
    if (outDir != null) {
      File qf = new File(outDir, tname);
      stdoutName = qf.getName().concat(outFileExtension);
    } else {
      stdoutName = tname + outFileExtension;
    }

    File outf = new File(logDir, stdoutName);
    OutputStream fo = new BufferedOutputStream(new FileOutputStream(outf));
    if (qSortQuerySet.contains(tname)) {
      ss.out = new SortPrintStream(fo, "UTF-8");
    } else if (qHashQuerySet.contains(tname)) {
      ss.out = new DigestPrintStream(fo, "UTF-8");
    } else if (qSortNHashQuerySet.contains(tname)) {
      ss.out = new SortAndDigestPrintStream(fo, "UTF-8");
    } else {
      ss.out = new PrintStream(fo, true, "UTF-8");
    }
    ss.err = new CachingPrintStream(fo, true, "UTF-8");
    ss.setIsSilent(true);
    SessionState oldSs = SessionState.get();

    if (oldSs != null && clusterType == MiniClusterType.tez) {
      oldSs.close();
    }

    if (oldSs != null && oldSs.out != null && oldSs.out != System.out) {
      oldSs.out.close();
    }
    SessionState.start(ss);

    cliDriver = new CliDriver();
    cliDriver.processInitFiles(ss);

    return outf.getAbsolutePath();
  }
Example #16
 public void failed(Throwable e, String fname, String debugHint) {
   String command = SessionState.get() != null ? SessionState.get().getLastCommand() : null;
   LOG.error("Exception: ", e);
   e.printStackTrace();
   LOG.error("Failed query: " + fname);
   Assert.fail(
       "Unexpected exception "
           + org.apache.hadoop.util.StringUtils.stringifyException(e)
           + "\n"
           + (command != null ? " running " + command : "")
           + (debugHint != null ? debugHint : ""));
 }
Example #17
    /**
     * Separate from constructor, because initialize() may need to be called in a separate thread.
     */
    synchronized void initialize() {
      assertState(QueryState.CREATED);
      this.hiveConf = new HiveConf(Driver.class);

      // Update configuration with user/group info.
      if (query.hadoop_user == null) {
        throw new RuntimeException("User must be specified.");
      }

      // Update scratch dir (to have one per user)
      File scratchDir = new File("/tmp/hive-beeswax-" + query.hadoop_user);
      hiveConf.set(HiveConf.ConfVars.SCRATCHDIR.varname, scratchDir.getPath());
      // Create the temporary directory if necessary.
      // If mapred.job.tracker is set to local, this is used by MapRedTask.
      if (!scratchDir.isDirectory()) {
        if (scratchDir.exists() || !scratchDir.mkdirs()) {
          LOG.warn("Could not create tmp dir:" + scratchDir);
        }
      }

      driver = new Driver(hiveConf);
      ClassLoader loader = hiveConf.getClassLoader();
      String auxJars = HiveConf.getVar(hiveConf, HiveConf.ConfVars.HIVEAUXJARS);
      if (StringUtils.isNotBlank(auxJars)) {
        try {
          loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
        } catch (Exception e) {
          LOG.error("Failed to add jars to class loader: " + auxJars, e);
        }
      }
      hiveConf.setClassLoader(loader);
      Thread.currentThread().setContextClassLoader(loader);
      SessionState.start(hiveConf); // this is thread-local
      this.sessionState = SessionState.get();

      // If this work has a LogContext, associate the children output to the logContext
      OutputStream lcOutStream = null;
      if (this.logContext != null) lcOutStream = this.logContext.getOutputStream();

      // A copy of everything goes to the LogContext.
      // In addition, stderr goes to errStream for error reporting.
      // Note that child output is explicitly teed to System.{out,err},
      // otherwise it'll be swallowed by outStream.
      this.sessionState.out = new PrintStream(new TeeOutputStream(lcOutStream, this.outStream));
      this.sessionState.err = new PrintStream(new TeeOutputStream(lcOutStream, this.errStream));
      this.sessionState.childOut =
          new PrintStream(new TeeOutputStream(System.out, sessionState.out));
      this.sessionState.childErr =
          new PrintStream(new TeeOutputStream(System.err, sessionState.err));

      this.state = QueryState.INITIALIZED;
    }
Example #18
  @Override
  public void close() throws HiveSQLException {
    setState(OperationState.CLOSED);
    if (driver != null) {
      driver.close();
      driver.destroy();
    }

    SessionState session = SessionState.get();
    if (session != null && session.getTmpOutputFile() != null) {
      session.getTmpOutputFile().delete();
    }
  }
Example #19
  @BeforeClass
  public void setUp() throws Exception {
    // Set up a Hive session
    HiveConf conf = new HiveConf();
    driver = new Driver(conf);
    ss = new SessionState(conf, System.getProperty("user.name"));
    ss = SessionState.start(ss);
    SessionState.setCurrentSessionState(ss);

    Configuration configuration = ApplicationProperties.get();
    dgiCLient =
        new AtlasClient(configuration.getString(HiveMetaStoreBridge.ATLAS_ENDPOINT, DGI_URL));
  }
Example #20
  static void validateFiles(List<String> newFiles) throws IllegalArgumentException {
    SessionState ss = SessionState.get();
    Configuration conf = (ss == null) ? new Configuration() : ss.getConf();

    for (String newFile : newFiles) {
      try {
        if (Utilities.realFile(newFile, conf) == null) {
          String message = newFile + " does not exist";
          throw new IllegalArgumentException(message);
        }
      } catch (IOException e) {
        String message = "Unable to validate " + newFile;
        throw new IllegalArgumentException(message, e);
      }
    }
  }
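
A call sketch (file paths are assumptions): validating ADD FILE/JAR arguments before registering them, with the IllegalArgumentException carrying the offending path.

  try {
    validateFiles(java.util.Arrays.asList("/tmp/udfs.jar", "/tmp/lookup.txt"));
  } catch (IllegalArgumentException e) {
    LOG.error("Resource validation failed", e);
  }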
Example #21
  public static void baseSetup() throws Exception {
    MiniDFSShim dfs = ShimLoader.getHadoopShims().getMiniDfs(conf, 4, true, null);
    fs = dfs.getFileSystem();
    baseDfsDir = new Path(new Path(fs.getUri()), "/base");
    fs.mkdirs(baseDfsDir);
    warehouseDir = new Path(baseDfsDir, "warehouse");
    fs.mkdirs(warehouseDir);
    conf.setVar(ConfVars.METASTOREWAREHOUSE, warehouseDir.toString());

    // Assuming the tests are run on either the C: or D: drive on Windows
    dataFileDir =
        conf.get("test.data.files")
            .replace('\\', '/')
            .replace("c:", "")
            .replace("C:", "")
            .replace("D:", "")
            .replace("d:", "");
    dataFilePath = new Path(dataFileDir, "kv1.txt");

    // Set up scratch directory
    Path scratchDir = new Path(baseDfsDir, "scratchdir");
    conf.setVar(HiveConf.ConfVars.SCRATCHDIR, scratchDir.toString());

    // set hive conf vars
    conf.setBoolVar(HiveConf.ConfVars.HIVE_SUPPORT_CONCURRENCY, false);
    conf.setBoolVar(HiveConf.ConfVars.HIVE_WAREHOUSE_SUBDIR_INHERIT_PERMS, true);
    conf.setVar(HiveConf.ConfVars.DYNAMICPARTITIONINGMODE, "nonstrict");
    int port = MetaStoreUtils.findFreePort();
    MetaStoreUtils.startMetaStore(port, ShimLoader.getHadoopThriftAuthBridge());

    SessionState.start(new CliSessionState(conf));
    driver = new Driver(conf);
    setupDataTable();
  }
Example #22
  private static int executeDiffCommand(
      String inFileName, String outFileName, boolean ignoreWhiteSpace, boolean sortResults)
      throws Exception {

    int result = 0;

    if (sortResults) {
      // sort will try to open the output file in write mode on Windows, so we
      // need to close it first.
      SessionState ss = SessionState.get();
      if (ss != null && ss.out != null && ss.out != System.out) {
        ss.out.close();
      }

      String inSorted = inFileName + SORT_SUFFIX;
      String outSorted = outFileName + SORT_SUFFIX;

      result = sortFiles(inFileName, inSorted);
      result |= sortFiles(outFileName, outSorted);
      if (result != 0) {
        LOG.error("ERROR: Could not sort files before comparing");
        return result;
      }
      inFileName = inSorted;
      outFileName = outSorted;
    }

    ArrayList<String> diffCommandArgs = new ArrayList<String>();
    diffCommandArgs.add("diff");

    // Text file comparison
    diffCommandArgs.add("-a");

    // Ignore changes in the amount of white space
    if (ignoreWhiteSpace || Shell.WINDOWS) {
      diffCommandArgs.add("-b");
    }

    // Files created on Windows machines have different line endings
    // than files created on Unix/Linux. Windows uses carriage return and line feed
    // ("\r\n") as a line ending, whereas Unix uses just line feed ("\n").
    // Also, StringBuilder.toString() and Stream-to-String conversions add extra
    // spaces at the end of the line.
    if (Shell.WINDOWS) {
      diffCommandArgs.add("--strip-trailing-cr"); // Strip trailing carriage return on input
      diffCommandArgs.add("-B"); // Ignore changes whose lines are all blank
    }
    // Add files to compare to the arguments list
    diffCommandArgs.add(getQuotedString(inFileName));
    diffCommandArgs.add(getQuotedString(outFileName));

    result = executeCmd(diffCommandArgs);

    if (sortResults) {
      new File(inFileName).delete();
      new File(outFileName).delete();
    }

    return result;
  }
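
A call sketch (file names are assumptions): comparing expected vs. actual q-file output with sorting enabled, so row order does not affect the result.

  int rc = executeDiffCommand("expected/join1.q.out", "actual/join1.q.out",
      /* ignoreWhiteSpace */ true, /* sortResults */ true);
  if (rc != 0) {
    LOG.error("Output differs from expected");
  }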
Example #23
 // Check if this write entity needs to be skipped
 private boolean filterWriteEntity(WriteEntity writeEntity) throws AuthorizationException {
   // skip URI validation for session scratch file URIs
   if (writeEntity.isTempURI()) {
     return true;
   }
   try {
     if (writeEntity.getTyp().equals(Type.DFS_DIR)
         || writeEntity.getTyp().equals(Type.LOCAL_DIR)) {
       HiveConf conf = SessionState.get().getConf();
       String warehouseDir = conf.getVar(ConfVars.METASTOREWAREHOUSE);
       URI scratchURI =
           new URI(PathUtils.parseDFSURI(warehouseDir, conf.getVar(HiveConf.ConfVars.SCRATCHDIR)));
       URI requestURI =
           new URI(PathUtils.parseDFSURI(warehouseDir, writeEntity.getLocation().getPath()));
       LOG.debug("scratchURI = " + scratchURI + ", requestURI = " + requestURI);
       if (PathUtils.impliesURI(scratchURI, requestURI)) {
         return true;
       }
       URI localScratchURI =
           new URI(PathUtils.parseLocalURI(conf.getVar(HiveConf.ConfVars.LOCALSCRATCHDIR)));
       URI localRequestURI = new URI(PathUtils.parseLocalURI(writeEntity.getLocation().getPath()));
       LOG.debug(
           "localScratchURI = " + localScratchURI + ", localRequestURI = " + localRequestURI);
       if (PathUtils.impliesURI(localScratchURI, localRequestURI)) {
         return true;
       }
     }
   } catch (Exception e) {
     throw new AuthorizationException("Failed to extract uri details", e);
   }
   return false;
 }
Example #24
    /**
     * Executes a query.
     *
     * @param cmd HiveQL query to execute
     */
    public void execute(String cmd) throws HiveServerException, TException {
      HiveServerHandler.LOG.info("Running the query: " + cmd);
      SessionState ss = SessionState.get();

      String cmd_trimmed = cmd.trim();
      String[] tokens = cmd_trimmed.split("\\s");
      String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();

      int ret = 0;
      try {
        CommandProcessor proc = CommandProcessorFactory.get(tokens[0]);
        if (proc != null) {
          if (proc instanceof Driver) {
            isHiveQuery = true;
            ret = driver.run(cmd);
          } else {
            isHiveQuery = false;
            ret = proc.run(cmd_1);
          }
        }
      } catch (Exception e) {
        throw new HiveServerException("Error running query: " + e.toString());
      }

      if (ret != 0) {
        throw new HiveServerException("Query returned non-zero code: " + ret);
      }
    }
Example #25
 public static IOContext get() {
   if (SessionState.get() == null) {
     // this happens on the backend; only one IO context is needed there.
     return ioContext;
   }
   return IOContext.threadLocal.get();
 }
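
A call-site sketch (setInputPath and currentSplitPath are assumptions): callers do not need to know whether they run on the frontend (thread-local context) or the backend (shared static context).

 IOContext ctx = IOContext.get();
 ctx.setInputPath(currentSplitPath); // hypothetical setter and variable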
Example #26
 /**
  * This method is called in the Driver on every task. It updates counters and calls execute(),
  * which is overridden in each task.
  *
  * @return return value of execute()
  */
 public int executeTask() {
   try {
     SessionState ss = SessionState.get();
     this.setStarted();
     if (ss != null) {
       ss.getHiveHistory().logPlanProgress(queryPlan);
     }
     int retval = execute(driverContext);
     this.setDone();
     if (ss != null) {
       ss.getHiveHistory().logPlanProgress(queryPlan);
     }
     return retval;
   } catch (IOException e) {
     throw new RuntimeException("Unexpected error: " + e.getMessage(), e);
   }
 }
Example #27
  public HiveAuthzBindingHook() throws Exception {
    SessionState session = SessionState.get();
    if (session == null) {
      throw new IllegalStateException("Session has not been started");
    }
    // HACK: set a random classname to force the Auth V2 in Hive
    SessionState.get().setAuthorizer(null);

    HiveConf hiveConf = session.getConf();
    if (hiveConf == null) {
      throw new IllegalStateException("Session HiveConf is null");
    }
    authzConf = loadAuthzConf(hiveConf);
    hiveAuthzBinding = new HiveAuthzBinding(hiveConf, authzConf);

    FunctionRegistry.setupPermissionsForBuiltinUDFs("", HiveAuthzConf.HIVE_UDF_BLACK_LIST);
  }
Example #28
 public String aggregateStats(String keyPrefix, String statType) {
   SessionState ss = SessionState.get();
   // Have to use the length instead of the actual prefix because the prefix is location dependent
   // 17 is 16 (16 byte MD5 hash) + 1 for the path separator
   // Can be less than 17 due to unicode characters
   ss.out.println("Stats prefix is hashed: " + new Boolean(keyPrefix.length() <= 17));
   return null;
 }
Example #29
  public static void main(String[] args) throws Exception {

    HiveConf conf = new HiveConf();
    conf.addResource(new Path("file:///", System.getProperty("oozie.action.conf.xml")));
    conf.setVar(ConfVars.SEMANTIC_ANALYZER_HOOK, HCatSemanticAnalyzer.class.getName());
    conf.setBoolVar(ConfVars.METASTORE_USE_THRIFT_SASL, true);
    SessionState.start(new CliSessionState(conf));
    new CliDriver().processLine(args[0]);
  }
Example #30
 @After
 public void tearDown() throws Exception {
   SessionState.get().close();
   utils = null;
   work = null;
   task = null;
   path = null;
   fs = null;
 }