Example #1
  /** Initialization when invoked from QL. */
  @Override
  public void initialize(HiveConf conf, QueryPlan queryPlan, DriverContext driverContext) {
    super.initialize(conf, queryPlan, driverContext);

    job = new JobConf(conf, ExecDriver.class);

    // NOTE: initialize is only called if it is in non-local mode.
    // In case it's in non-local mode, we need to move the SessionState files
    // and jars to jobConf.
    // In case it's in local mode, MapRedTask will set the jobConf.
    //
    // "tmpfiles" and "tmpjars" are set by the method ExecDriver.execute(),
    // which will be called by both local and NON-local mode.
    String addedFiles = Utilities.getResourceFiles(job, SessionState.ResourceType.FILE);
    if (StringUtils.isNotBlank(addedFiles)) {
      HiveConf.setVar(job, ConfVars.HIVEADDEDFILES, addedFiles);
    }
    String addedJars = Utilities.getResourceFiles(job, SessionState.ResourceType.JAR);
    if (StringUtils.isNotBlank(addedJars)) {
      HiveConf.setVar(job, ConfVars.HIVEADDEDJARS, addedJars);
    }
    String addedArchives = Utilities.getResourceFiles(job, SessionState.ResourceType.ARCHIVE);
    if (StringUtils.isNotBlank(addedArchives)) {
      HiveConf.setVar(job, ConfVars.HIVEADDEDARCHIVES, addedArchives);
    }
    conf.stripHiddenConfigurations(job);
    this.jobExecHelper = new HadoopJobExecHelper(job, console, this, this);
  }
Example #2
File: ATSHook.java  Project: apache/hive
  TimelineEntity createPreHookEvent(
      String queryId,
      String query,
      JSONObject explainPlan,
      long startTime,
      String user,
      String requestuser,
      int numMrJobs,
      int numTezJobs,
      String opId,
      String clientIpAddress,
      String hiveInstanceAddress,
      String hiveInstanceType,
      String sessionID,
      String logID,
      String threadId,
      String executionMode,
      List<String> tablesRead,
      List<String> tablesWritten,
      HiveConf conf)
      throws Exception {

    JSONObject queryObj = new JSONObject(new LinkedHashMap<>());
    queryObj.put("queryText", query);
    queryObj.put("queryPlan", explainPlan);

    LOG.info("Received pre-hook notification for :" + queryId);
    if (LOG.isDebugEnabled()) {
      LOG.debug("Otherinfo: " + queryObj.toString());
      LOG.debug("Operation id: <" + opId + ">");
    }

    conf.stripHiddenConfigurations(conf);
    Map<String, String> confMap = new HashMap<String, String>();
    for (Map.Entry<String, String> setting : conf) {
      confMap.put(setting.getKey(), setting.getValue());
    }
    JSONObject confObj = new JSONObject((Map) confMap);

    TimelineEntity atsEntity = new TimelineEntity();
    atsEntity.setEntityId(queryId);
    atsEntity.setEntityType(EntityTypes.HIVE_QUERY_ID.name());
    atsEntity.addPrimaryFilter(PrimaryFilterTypes.user.name(), user);
    atsEntity.addPrimaryFilter(PrimaryFilterTypes.requestuser.name(), requestuser);
    atsEntity.addPrimaryFilter(PrimaryFilterTypes.executionmode.name(), executionMode);
    atsEntity.addPrimaryFilter(
        PrimaryFilterTypes.queue.name(), conf.get("mapreduce.job.queuename"));

    if (opId != null) {
      atsEntity.addPrimaryFilter(PrimaryFilterTypes.operationid.name(), opId);
    }

    for (String tabName : tablesRead) {
      atsEntity.addPrimaryFilter(PrimaryFilterTypes.tablesread.name(), tabName);
    }
    for (String tabName : tablesWritten) {
      atsEntity.addPrimaryFilter(PrimaryFilterTypes.tableswritten.name(), tabName);
    }

    TimelineEvent startEvt = new TimelineEvent();
    startEvt.setEventType(EventTypes.QUERY_SUBMITTED.name());
    startEvt.setTimestamp(startTime);
    atsEntity.addEvent(startEvt);

    atsEntity.addOtherInfo(OtherInfoTypes.QUERY.name(), queryObj.toString());
    atsEntity.addOtherInfo(OtherInfoTypes.TEZ.name(), numTezJobs > 0);
    atsEntity.addOtherInfo(OtherInfoTypes.MAPRED.name(), numMrJobs > 0);
    atsEntity.addOtherInfo(OtherInfoTypes.SESSION_ID.name(), sessionID);
    atsEntity.addOtherInfo(OtherInfoTypes.INVOKER_INFO.name(), logID);
    atsEntity.addOtherInfo(OtherInfoTypes.THREAD_NAME.name(), threadId);
    atsEntity.addOtherInfo(OtherInfoTypes.VERSION.name(), VERSION);
    if (clientIpAddress != null) {
      atsEntity.addOtherInfo(OtherInfoTypes.CLIENT_IP_ADDRESS.name(), clientIpAddress);
    }
    atsEntity.addOtherInfo(OtherInfoTypes.HIVE_ADDRESS.name(), hiveInstanceAddress);
    atsEntity.addOtherInfo(OtherInfoTypes.HIVE_INSTANCE_TYPE.name(), hiveInstanceType);
    atsEntity.addOtherInfo(OtherInfoTypes.CONF.name(), confObj.toString());

    return atsEntity;
  }
Example #3
  public Builder setConf(HiveConf origConf) {
    // Keep a private copy of the configuration for the web UI context.
    this.conf = new HiveConf(origConf);
    // Scrub hidden entries (passwords and the like) from the copy before exposing it.
    origConf.stripHiddenConfigurations(conf);
    setContextAttribute(CONF_CONTEXT_ATTRIBUTE, conf);
    return this;
  }
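
All three examples follow the same pattern: make a copy of the configuration and call stripHiddenConfigurations() on the copy that is about to leave the process (a JobConf handed to Hadoop, a serialized conf map sent to ATS, a web UI context attribute), so that entries listed in hive.conf.hidden.list are not exposed. Below is a minimal sketch of that pattern, assuming only the standard HiveConf/Configuration APIs; the class and method names (StripHiddenSketch, exportableCopy) are illustrative and not taken from Hive:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hive.conf.HiveConf;

  public class StripHiddenSketch {
    /** Returns a copy of the session conf that is safe to expose externally. */
    public static Configuration exportableCopy(HiveConf sessionConf) {
      // Work on a copy so the live session configuration keeps its hidden values.
      Configuration copy = new Configuration(sessionConf);
      // Remove hidden entries (credentials, passwords, ...) from the copy only.
      sessionConf.stripHiddenConfigurations(copy);
      return copy;
    }
  }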