Example #1
 /**
  * Returns the current UGI on the stack
  *
  * @param opConfig the Hive configuration for the current operation
  * @return the current UserGroupInformation
  * @throws HiveSQLException
  */
 private UserGroupInformation getCurrentUGI(HiveConf opConfig) throws HiveSQLException {
   try {
     return Utils.getUGI();
   } catch (Exception e) {
     throw new HiveSQLException("Unable to get current user", e);
   }
 }
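A minimal hedged usage sketch: the returned UGI is typically used to run work under the caller's Hadoop identity via doAs. The wrapper method below is hypothetical, not part of the original class (it assumes java.security.PrivilegedExceptionAction is imported).

 // Hypothetical caller in the same class: run an action as the current UGI.
 private void runAsCurrentUser(HiveConf opConfig) throws Exception {
   UserGroupInformation ugi = getCurrentUGI(opConfig);
   ugi.doAs(new PrivilegedExceptionAction<Void>() {
     @Override
     public Void run() throws Exception {
       // ... do work with the caller's Hadoop identity ...
       return null;
     }
   });
 }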
Example #2
  /**
   * Sets the current session to an existing session object. If a thread is running multiple
   * sessions, it must call this method with the new session object when switching from one
   * session to another.
   */
  public static SessionState start(SessionState startSs) {
    setCurrentSessionState(startSs);

    if (startSs.hiveHist == null) {
      if (startSs.getConf().getBoolVar(HiveConf.ConfVars.HIVE_SESSION_HISTORY_ENABLED)) {
        startSs.hiveHist = new HiveHistoryImpl(startSs);
      } else {
        // Hive history is disabled, create a no-op proxy
        startSs.hiveHist = HiveHistoryProxyHandler.getNoOpHiveHistoryProxy();
      }
    }

    // Get the following out of the way when we start the session; these take a
    // while and should be done once at startup.
    try {
      // Hive object instance should be created with a copy of the conf object. If the conf is
      // shared with SessionState, other parts of the code might update the config, but
      // Hive.get(HiveConf) would not recognize the case when it needs refreshing
      Hive.get(new HiveConf(startSs.conf)).getMSC();
      UserGroupInformation sessionUGI = Utils.getUGI();
      FileSystem.get(startSs.conf);

      // Create scratch dirs for this session
      startSs.createSessionDirs(sessionUGI.getShortUserName());

      // Set temp file containing results to be sent to HiveClient
      if (startSs.getTmpOutputFile() == null) {
        try {
          startSs.setTmpOutputFile(createTempFile(startSs.getConf()));
        } catch (IOException e) {
          throw new RuntimeException(e);
        }
      }

    } catch (Exception e) {
      // Catch-all due to some exec time dependencies on session state
      // that would cause ClassNotFoundException otherwise
      throw new RuntimeException(e);
    }

    if (HiveConf.getVar(startSs.getConf(), HiveConf.ConfVars.HIVE_EXECUTION_ENGINE).equals("tez")
        && !startSs.isHiveServerQuery) {
      try {
        if (startSs.tezSessionState == null) {
          startSs.tezSessionState = new TezSessionState(startSs.getSessionId());
        }
        if (!startSs.tezSessionState.isOpen()) {
          startSs.tezSessionState.open(startSs.conf); // should use conf on session start-up
        }
      } catch (Exception e) {
        throw new RuntimeException(e);
      }
    } else {
      LOG.info("No Tez session required at this point. hive.execution.engine=mr.");
    }
    return startSs;
  }
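A minimal start-up sketch, assuming default configuration: construct a SessionState from a HiveConf and activate it for the calling thread before running queries. The values are placeholders.

  // Hypothetical start-up: bind a new session to this thread.
  HiveConf conf = new HiveConf();
  SessionState ss = new SessionState(conf);
  SessionState.start(ss);           // runs the initialization shown above
  assert SessionState.get() == ss;  // the thread-local session is now ss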
Example #3
 // Look up the delegation token: first in the connection URL, then in the Configuration
 private String getClientDelegationToken(Map<String, String> jdbcConnConf) throws SQLException {
   String tokenStr = null;
   if (JdbcConnectionParams.AUTH_TOKEN.equalsIgnoreCase(
       jdbcConnConf.get(JdbcConnectionParams.AUTH_TYPE))) {
     // check for a delegation token in the job conf, if any
     try {
       tokenStr =
           org.apache.hadoop.hive.shims.Utils.getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
     } catch (IOException e) {
      throw new SQLException("Error reading token", e);
     }
   }
   return tokenStr;
 }
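A hedged caller-side sketch: the token lookup above only fires when the parsed JDBC URL carried the delegation-token auth type (JdbcConnectionParams.AUTH_TYPE mapped to JdbcConnectionParams.AUTH_TOKEN). The map below is a hypothetical stand-in for the parsed connection parameters.

 // Hypothetical parsed-URL parameters; assumes java.util.HashMap is imported.
 Map<String, String> jdbcConnConf = new HashMap<String, String>();
 jdbcConnConf.put(JdbcConnectionParams.AUTH_TYPE, JdbcConnectionParams.AUTH_TOKEN);
 String token = getClientDelegationToken(jdbcConnConf); // null when no token is stored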
Example #4
  @Override
  public synchronized void init(HiveConf hiveConf) {
    this.hiveConf = hiveConf;
    sessionManager = new SessionManager(hiveServer2);
    defaultFetchRows = hiveConf.getIntVar(ConfVars.HIVE_SERVER2_RESULTSET_DEFAULT_FETCH_SIZE);
    addService(sessionManager);
    // If the Hadoop cluster is secure, do a Kerberos login for the service from the keytab
    if (UserGroupInformation.isSecurityEnabled()) {
      try {
        HiveAuthFactory.loginFromKeytab(hiveConf);
        this.serviceUGI = Utils.getUGI();
      } catch (IOException | LoginException e) {
        throw new ServiceException("Unable to login to kerberos with given principal/keytab", e);
      }

      // Also try creating a UGI object for the SPNego principal
      String principal = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL);
      String keyTabFile = hiveConf.getVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB);
      if (principal.isEmpty() || keyTabFile.isEmpty()) {
        LOG.info(
            "SPNego httpUGI not created, spNegoPrincipal: "
                + principal
                + ", keytabFile: "
                + keyTabFile);
      } else {
        try {
          this.httpUGI = HiveAuthFactory.loginFromSpnegoKeytabAndReturnUGI(hiveConf);
          LOG.info("SPNego httpUGI successfully created.");
        } catch (IOException e) {
          LOG.warn("SPNego httpUGI creation failed: ", e);
        }
      }
    }
    // creates connection to HMS and thus *must* occur after kerberos login above
    try {
      applyAuthorizationConfigPolicy(hiveConf);
    } catch (Exception e) {
      throw new RuntimeException(
          "Error applying authorization policy on hive configuration: " + e.getMessage(), e);
    }
    setupBlockedUdfs();
    super.init(hiveConf);
  }
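For the secure branch to run, the Kerberos (and optionally SPNego) principal/keytab pairs must be set on the HiveConf before init is called. A hedged configuration sketch; the principals and paths are placeholders, and cliService stands in for an instance of the class above.

  // Hypothetical setup preceding init(); all values are placeholders.
  HiveConf hiveConf = new HiveConf();
  hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_PRINCIPAL, "hive/_HOST@EXAMPLE.COM");
  hiveConf.setVar(ConfVars.HIVE_SERVER2_KERBEROS_KEYTAB, "/etc/security/keytabs/hive.service.keytab");
  hiveConf.setVar(ConfVars.HIVE_SERVER2_SPNEGO_PRINCIPAL, "HTTP/_HOST@EXAMPLE.COM");
  hiveConf.setVar(ConfVars.HIVE_SERVER2_SPNEGO_KEYTAB, "/etc/security/keytabs/spnego.service.keytab");
  cliService.init(hiveConf); // performs the keytab login before touching the metastore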
Example #5
  public int executeInChildVM(DriverContext driverContext) {
    // execute in child jvm
    try {
      // generate the cmd line to run in the child jvm
      Context ctx = driverContext.getCtx();
      String hiveJar = conf.getJar();

      String hadoopExec = conf.getVar(HiveConf.ConfVars.HADOOPBIN);
      conf.setVar(
          ConfVars.HIVEADDEDJARS, Utilities.getResourceFiles(conf, SessionState.ResourceType.JAR));
      // write out the plan to a local file
      Path planPath = new Path(ctx.getLocalTmpPath(), "plan.xml");
      MapredLocalWork plan = getWork();
      LOG.info("Generating plan file " + planPath.toString());

      OutputStream out = null;
      try {
        out = FileSystem.getLocal(conf).create(planPath);
        SerializationUtilities.serializePlan(plan, out);
        out.close();
        out = null;
      } finally {
        IOUtils.closeQuietly(out);
      }

      String isSilent = "true".equalsIgnoreCase(System.getProperty("test.silent")) ? "-nolog" : "";

      String jarCmd;

      jarCmd = hiveJar + " " + ExecDriver.class.getName();
      String hiveConfArgs = ExecDriver.generateCmdLine(conf, ctx);
      String cmdLine =
          hadoopExec
              + " jar "
              + jarCmd
              + " -localtask -plan "
              + planPath.toString()
              + " "
              + isSilent
              + " "
              + hiveConfArgs;

      String workDir = (new File(".")).getCanonicalPath();
      String files = Utilities.getResourceFiles(conf, SessionState.ResourceType.FILE);

      if (!files.isEmpty()) {
        cmdLine = cmdLine + " -files " + files;

        workDir = ctx.getLocalTmpPath().toUri().getPath();

        if (!(new File(workDir)).mkdir()) {
          throw new IOException("Cannot create tmp working dir: " + workDir);
        }

        for (String f : StringUtils.split(files, ',')) {
          Path p = new Path(f);
          String target = p.toUri().getPath();
          String link = workDir + Path.SEPARATOR + p.getName();
          if (FileUtil.symLink(target, link) != 0) {
            throw new IOException("Cannot link to added file: " + target + " from: " + link);
          }
        }
      }

      // Inherit Java system variables
      String hadoopOpts;
      StringBuilder sb = new StringBuilder();
      Properties p = System.getProperties();
      for (String element : HIVE_SYS_PROP) {
        if (p.containsKey(element)) {
          sb.append(" -D" + element + "=" + p.getProperty(element));
        }
      }
      hadoopOpts = sb.toString();
      // Inherit the environment variables
      String[] env;
      Map<String, String> variables = new HashMap<String, String>(System.getenv());
      // The user can specify the hadoop memory. When running in local mode, the
      // amount of memory used by the child jvm can no longer default to the
      // memory used by the parent jvm; when we are only submitting the job via a
      // child process, it is appropriate for the child jvm to use the same
      // memory as the parent jvm.
      int hadoopMem = conf.getIntVar(HiveConf.ConfVars.HIVEHADOOPMAXMEM);
      if (hadoopMem == 0) {
        // remove env var that would default child jvm to use parent's memory
        // as default. child jvm would use default memory for a hadoop client
        variables.remove(HADOOP_MEM_KEY);
      } else {
        // user specified the memory for local mode hadoop run
        console.printInfo(" set heap size\t" + hadoopMem + "MB");
        variables.put(HADOOP_MEM_KEY, String.valueOf(hadoopMem));
      }

      // Set HADOOP_USER_NAME env variable for the child process, so that
      // it also runs with hadoop permissions for the user the job is running as.
      // This is only used by hadoop in unsecured (non-Kerberos) mode.
      String endUserName = Utils.getUGI().getShortUserName();
      LOG.debug("setting HADOOP_USER_NAME\t" + endUserName);
      variables.put("HADOOP_USER_NAME", endUserName);

      if (variables.containsKey(HADOOP_OPTS_KEY)) {
        variables.put(HADOOP_OPTS_KEY, variables.get(HADOOP_OPTS_KEY) + hadoopOpts);
      } else {
        variables.put(HADOOP_OPTS_KEY, hadoopOpts);
      }

      // For Windows OS, we need to pass HIVE_HADOOP_CLASSPATH Java parameter while starting
      // Hiveserver2 using "-hiveconf hive.hadoop.classpath=%HIVE_LIB%". This is to combine path(s).
      if (HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_HADOOP_CLASSPATH) != null) {
        if (variables.containsKey("HADOOP_CLASSPATH")) {
          variables.put(
              "HADOOP_CLASSPATH",
              variables.get("HADOOP_CLASSPATH")
                  + ";"
                  + HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_HADOOP_CLASSPATH));
        } else {
          variables.put(
              "HADOOP_CLASSPATH", HiveConf.getVar(conf, HiveConf.ConfVars.HIVE_HADOOP_CLASSPATH));
        }
      }

      if (variables.containsKey(MapRedTask.HIVE_DEBUG_RECURSIVE)) {
        MapRedTask.configureDebugVariablesForChildJVM(variables);
      }

      if (UserGroupInformation.isSecurityEnabled() && UserGroupInformation.isLoginKeytabBased()) {
        // If kerberos security is enabled, and HS2 doAs is enabled,
        // then additional params need to be set so that the command is run as
        // intended user
        secureDoAs = new SecureCmdDoAs(conf);
        secureDoAs.addEnv(variables);
      }

      // If HIVE_LOCAL_TASK_CHILD_OPTS is set, child VM environment setting
      // HADOOP_CLIENT_OPTS will be replaced with HIVE_LOCAL_TASK_CHILD_OPTS.
      // HADOOP_OPTS is updated too since HADOOP_CLIENT_OPTS is appended
      // to HADOOP_OPTS in most cases. This way, the local task JVM can
      // have different settings from those of HiveServer2.
      if (variables.containsKey(HIVE_LOCAL_TASK_CHILD_OPTS_KEY)) {
        String childOpts = variables.get(HIVE_LOCAL_TASK_CHILD_OPTS_KEY);
        if (childOpts == null) {
          childOpts = "";
        }
        String clientOpts = variables.put(HADOOP_CLIENT_OPTS, childOpts);
        String tmp = variables.get(HADOOP_OPTS_KEY);
        if (tmp != null && !StringUtils.isBlank(clientOpts)) {
          tmp = tmp.replace(clientOpts, childOpts);
          variables.put(HADOOP_OPTS_KEY, tmp);
        }
      }

      env = new String[variables.size()];
      int pos = 0;
      for (Map.Entry<String, String> entry : variables.entrySet()) {
        String name = entry.getKey();
        String value = entry.getValue();
        env[pos++] = name + "=" + value;
        LOG.debug("Setting env: " + env[pos - 1]);
      }

      LOG.info("Executing: " + cmdLine);

      // Run ExecDriver in another JVM
      executor = Runtime.getRuntime().exec(cmdLine, env, new File(workDir));

      CachingPrintStream errPrintStream = new CachingPrintStream(System.err);

      StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, System.out);
      StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, errPrintStream);

      outPrinter.start();
      errPrinter.start();

      int exitVal = jobExecHelper.progressLocal(executor, getId());

      // wait for stream threads to finish
      outPrinter.join();
      errPrinter.join();

      if (exitVal != 0) {
        LOG.error("Execution failed with exit status: " + exitVal);
        if (SessionState.get() != null) {
          SessionState.get().addLocalMapRedErrors(getId(), errPrintStream.getOutput());
        }
      } else {
        LOG.info("Execution completed successfully");
      }

      return exitVal;
    } catch (Exception e) {
      LOG.error("Exception: " + e, e);
      return 1;
    } finally {
      if (secureDoAs != null) {
        secureDoAs.close();
      }
    }
  }
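The core pattern in this example is converting the environment map into the "NAME=value" array that Runtime.exec expects, then draining the child's output so it cannot block on a full pipe. A self-contained hedged sketch of just that pattern; "echo" stands in for the real hadoop jar command line.

import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.util.HashMap;
import java.util.Map;

public class ChildVmSketch {
  public static void main(String[] args) throws Exception {
    Map<String, String> variables = new HashMap<>(System.getenv());
    variables.put("HADOOP_USER_NAME", "hive"); // placeholder user
    // Convert the map to the "NAME=value" array form Runtime.exec expects.
    String[] env = new String[variables.size()];
    int pos = 0;
    for (Map.Entry<String, String> entry : variables.entrySet()) {
      env[pos++] = entry.getKey() + "=" + entry.getValue();
    }
    // "echo" is a stand-in for the real "hadoop jar ..." command line.
    Process child = Runtime.getRuntime().exec("echo hello", env, new File("."));
    // Drain stdout so the child cannot block on a full pipe.
    try (BufferedReader r = new BufferedReader(new InputStreamReader(child.getInputStream()))) {
      String line;
      while ((line = r.readLine()) != null) {
        System.out.println(line);
      }
    }
    System.out.println("exit=" + child.waitFor());
  }
}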
Example #6
 // Store the given token in the UGI
 private void storeToken(String tokenStr, UserGroupInformation ugi) throws Exception {
   Utils.setTokenStr(ugi, tokenStr, HiveAuthFactory.HS2_CLIENT_TOKEN);
 }
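A hedged flow tying this to Example #3: read the stored token form, then plant it on the current UGI so later connections can authenticate with it. The surrounding call site is hypothetical.

 // Hypothetical caller; Utils.getTokenStrForm is the same shim used in Example #3.
 UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
 String tokenStr =
     org.apache.hadoop.hive.shims.Utils.getTokenStrForm(HiveAuthFactory.HS2_CLIENT_TOKEN);
 if (tokenStr != null) {
   storeToken(tokenStr, ugi);
 }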
Example #7
  @Override
  protected void doPost(HttpServletRequest request, HttpServletResponse response)
      throws ServletException, IOException {
    String clientUserName = null;
    String clientIpAddress;
    boolean requireNewCookie = false;

    try {
      if (hiveConf.getBoolean(ConfVars.HIVE_SERVER2_XSRF_FILTER_ENABLED.varname, false)) {
        boolean continueProcessing = Utils.doXsrfFilter(request, response, null, null);
        if (!continueProcessing) {
          LOG.warn("Request did not have valid XSRF header, rejecting.");
          return;
        }
      }
      // If the cookie based authentication is already enabled, parse the
      // request and validate the request cookies.
      if (isCookieAuthEnabled) {
        clientUserName = validateCookie(request);
        requireNewCookie = (clientUserName == null);
        if (requireNewCookie) {
          LOG.info("Could not validate cookie sent, will try to generate a new cookie");
        }
      }
      // If the cookie based authentication is not enabled or the request does
      // not have a valid cookie, use the kerberos or password based authentication
      // depending on the server setup.
      if (clientUserName == null) {
        // For a kerberos setup
        if (isKerberosAuthMode(authType)) {
          String delegationToken = request.getHeader(HIVE_DELEGATION_TOKEN_HEADER);
          // Each http request must have an Authorization header
          if ((delegationToken != null) && (!delegationToken.isEmpty())) {
            clientUserName = doTokenAuth(request, response);
          } else {
            clientUserName = doKerberosAuth(request);
          }
        }
        // For password based authentication
        else {
          clientUserName = doPasswdAuth(request, authType);
        }
      }
      LOG.debug("Client username: "******"Client IP Address: " + clientIpAddress);
      // Set the thread local ip address
      SessionManager.setIpAddress(clientIpAddress);

      // get forwarded hosts address
      String forwarded_for = request.getHeader(X_FORWARDED_FOR);
      if (forwarded_for != null) {
        LOG.debug("{}:{}", X_FORWARDED_FOR, forwarded_for);
        List<String> forwardedAddresses = Arrays.asList(forwarded_for.split(","));
        SessionManager.setForwardedAddresses(forwardedAddresses);
      } else {
        SessionManager.setForwardedAddresses(Collections.<String>emptyList());
      }

      // Generate new cookie and add it to the response
      if (requireNewCookie
          && !authType.equalsIgnoreCase(HiveAuthFactory.AuthTypes.NOSASL.toString())) {
        String cookieToken = HttpAuthUtils.createCookieToken(clientUserName);
        Cookie hs2Cookie = createCookie(signer.signCookie(cookieToken));

        if (isHttpOnlyCookie) {
          response.setHeader("SET-COOKIE", getHttpOnlyCookieHeader(hs2Cookie));
        } else {
          response.addCookie(hs2Cookie);
        }
        LOG.info("Cookie added for clientUserName " + clientUserName);
      }
      super.doPost(request, response);
    } catch (HttpAuthenticationException e) {
      LOG.error("Error: ", e);
      // Send a 401 to the client
      response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
      if (isKerberosAuthMode(authType)) {
        response.addHeader(HttpAuthUtils.WWW_AUTHENTICATE, HttpAuthUtils.NEGOTIATE);
      }
      response.getWriter().println("Authentication Error: " + e.getMessage());
    } finally {
      // Clear the thread locals
      SessionManager.clearUserName();
      SessionManager.clearIpAddress();
      SessionManager.clearProxyUserName();
      SessionManager.clearForwardedAddresses();
    }
  }
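From the client side, the Kerberos branch above is selected by the presence of the delegation-token header. A hedged sketch of such a request; the endpoint, port, and header name string are assumptions rather than values taken from the original code (the servlet reads the constant HIVE_DELEGATION_TOKEN_HEADER).

  // Hypothetical client request; assumes java.net.URL and java.net.HttpURLConnection.
  URL url = new URL("http://hs2-host:10001/cliservice"); // placeholder endpoint
  HttpURLConnection conn = (HttpURLConnection) url.openConnection();
  conn.setRequestMethod("POST");
  conn.setRequestProperty("X-hive-delegation-token", "<token-string>"); // assumed header name
  int status = conn.getResponseCode(); // 401 when authentication fails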