Example #1
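 // Gathers job-failure debugging information (see Example #3) and reports any
 // IOException on the console rather than propagating it.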
 public void run() {
   try {
     diagnosticMesg = showJobFailDebugInfo();
   } catch (IOException e) {
     console.printError(e.getMessage());
   }
 }
Example #2
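  // Child-JVM entry point: parses command-line options, loads the job configuration and
  // serialized plan, sets up logging and auxiliary jars, then executes the plan either
  // in-process (-localtask) or as an ExecDriver map-reduce job.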
  @SuppressWarnings("unchecked")
  public static void main(String[] args) throws IOException, HiveException {

    String planFileName = null;
    String jobConfFileName = null;
    boolean noLog = false;
    String files = null;
    boolean localtask = false;
    try {
      for (int i = 0; i < args.length; i++) {
        if (args[i].equals("-plan")) {
          planFileName = args[++i];
        } else if (args[i].equals("-jobconffile")) {
          jobConfFileName = args[++i];
        } else if (args[i].equals("-nolog")) {
          noLog = true;
        } else if (args[i].equals("-files")) {
          files = args[++i];
        } else if (args[i].equals("-localtask")) {
          localtask = true;
        }
      }
    } catch (IndexOutOfBoundsException e) {
      System.err.println("Missing argument to option");
      printUsage();
    }

    JobConf conf;
    if (localtask) {
      conf = new JobConf(MapredLocalTask.class);
    } else {
      conf = new JobConf(ExecDriver.class);
    }

    if (jobConfFileName != null) {
      conf.addResource(new Path(jobConfFileName));
    }

    if (files != null) {
      conf.set("tmpfiles", files);
    }

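    // When Hadoop security is enabled, pass the location of the credentials (token)
    // file from the environment into the job configuration.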
    if (UserGroupInformation.isSecurityEnabled()) {
      String hadoopAuthToken = System.getenv(UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION);
      if (hadoopAuthToken != null) {
        conf.set("mapreduce.job.credentials.binary", hadoopAuthToken);
      }
    }

    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);

    String queryId = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEQUERYID, "").trim();
    if (queryId.isEmpty()) {
      queryId = "unknown-" + System.currentTimeMillis();
    }
    System.setProperty(HiveConf.ConfVars.HIVEQUERYID.toString(), queryId);

    if (noLog) {
      // If started from main(), and noLog is on, we should not output
      // any logs. To turn the log on, please set -Dtest.silent=false
      org.apache.logging.log4j.Logger logger = org.apache.logging.log4j.LogManager.getRootLogger();
      NullAppender appender = NullAppender.createNullAppender();
      appender.addToLogger(logger.getName(), Level.ERROR);
      appender.start();
    } else {
      setupChildLog4j(conf);
    }

    Logger LOG = LoggerFactory.getLogger(ExecDriver.class.getName());
    LogHelper console = new LogHelper(LOG, isSilent);

    if (planFileName == null) {
      console.printError("Must specify Plan File Name");
      printUsage();
    }

    // Print out the location of the log file for the user so that it's easy to
    // find the reason for local-mode execution failures.
    for (Appender appender :
        ((org.apache.logging.log4j.core.Logger) LogManager.getRootLogger())
            .getAppenders()
            .values()) {
      if (appender instanceof FileAppender) {
        console.printInfo("Execution log at: " + ((FileAppender) appender).getFileName());
      } else if (appender instanceof RollingFileAppender) {
        console.printInfo("Execution log at: " + ((RollingFileAppender) appender).getFileName());
      }
    }

    // The plan file should always be in a local directory.
    Path p = new Path(planFileName);
    FileSystem fs = FileSystem.getLocal(conf);
    InputStream pathData = fs.open(p);

    // This is a workaround for HADOOP-17: libjars are not added to the classpath of
    // the child process, so we add them here explicitly.

    String auxJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEAUXJARS);
    String addedJars = HiveConf.getVar(conf, HiveConf.ConfVars.HIVEADDEDJARS);
    try {
      // see also - code in CliDriver.java
      ClassLoader loader = conf.getClassLoader();
      if (StringUtils.isNotBlank(auxJars)) {
        loader = Utilities.addToClassPath(loader, StringUtils.split(auxJars, ","));
      }
      if (StringUtils.isNotBlank(addedJars)) {
        loader = Utilities.addToClassPath(loader, StringUtils.split(addedJars, ","));
      }
      conf.setClassLoader(loader);
      // Also set this as the thread context class loader so that new threads inherit
      // it and propagate it into Configurations created by those threads.
      Thread.currentThread().setContextClassLoader(loader);
    } catch (Exception e) {
      throw new HiveException(e.getMessage(), e);
    }
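    // Deserialize the plan read from planFileName and run it: in-process for a local
    // task, otherwise through ExecDriver as a map-reduce job.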
    int ret;
    if (localtask) {
      memoryMXBean = ManagementFactory.getMemoryMXBean();
      MapredLocalWork plan = Utilities.deserializePlan(pathData, MapredLocalWork.class, conf);
      MapredLocalTask ed = new MapredLocalTask(plan, conf, isSilent);
      ret = ed.executeInProcess(new DriverContext());

    } else {
      MapredWork plan = Utilities.deserializePlan(pathData, MapredWork.class, conf);
      ExecDriver ed = new ExecDriver(plan, conf, isSilent);
      ret = ed.execute(new DriverContext());
    }

    if (ret != 0) {
      System.exit(ret);
    }
  }
Example #3
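  // Fetches task-attempt logs and diagnostics for the failed Hadoop job and prints a
  // summary of the task with the most failures; returns that summary, or null if no
  // task failed.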
  private String showJobFailDebugInfo() throws IOException {
    console.printError("Error during job, obtaining debugging information...");
    if (!conf.get("mapred.job.tracker", "local").equals("local")) {
      // Show Tracking URL for remotely running jobs.
      console.printError("Job Tracking URL: " + rj.getTrackingURL());
    }
    // Loop to get all task completion events because getTaskCompletionEvents
    // only returns a subset per call
    TaskInfoGrabber tlg = new TaskInfoGrabber();
    Thread t = new Thread(tlg);
    try {
      t.start();
      t.join(HiveConf.getIntVar(conf, HiveConf.ConfVars.TASKLOG_DEBUG_TIMEOUT));
    } catch (InterruptedException e) {
      console.printError(
          "Timed out trying to finish grabbing task log URLs, some task info may be missing");
    }

    // Remove failures for tasks that succeeded
    for (String task : successes) {
      failures.remove(task);
    }

    if (failures.isEmpty()) {
      return null;
    }
    // Find the highest failure count
    computeMaxFailures();

    // Display Error Message for tasks with the highest failure count
    String jtUrl = null;
    try {
      jtUrl = JobTrackerURLResolver.getURL(conf);
    } catch (Exception e) {
      console.printError("Unable to retrieve URL for Hadoop Task logs. " + e.getMessage());
    }

    String msg = null;
    for (String task : failures.keySet()) {
      if (failures.get(task).intValue() == maxFailures) {
        TaskInfo ti = taskIdToInfo.get(task);
        String jobId = ti.getJobId();
        String taskUrl =
            (jtUrl == null)
                ? null
                : jtUrl + "/taskdetails.jsp?jobid=" + jobId + "&tipid=" + task;

        TaskLogProcessor tlp = new TaskLogProcessor(conf);
        for (String logUrl : ti.getLogUrls()) {
          tlp.addTaskAttemptLogUrl(logUrl);
        }

        if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.JOB_DEBUG_CAPTURE_STACKTRACES)
            && stackTraces != null) {
          if (!stackTraces.containsKey(jobId)) {
            stackTraces.put(jobId, new ArrayList<List<String>>());
          }
          stackTraces.get(jobId).addAll(tlp.getStackTraces());
        }

        if (HiveConf.getBoolVar(conf, HiveConf.ConfVars.SHOW_JOB_FAIL_DEBUG_INFO)) {
          List<ErrorAndSolution> errors = tlp.getErrors();

          StringBuilder sb = new StringBuilder();
          // We use a StringBuilder and then call printError only once as
          // printError will write to both stderr and the error log file. In
          // situations where both the stderr and the log file output is
          // simultaneously output to a single stream, this will look cleaner.
          sb.append("\n");
          sb.append("Task with the most failures(" + maxFailures + "): \n");
          sb.append("-----\n");
          sb.append("Task ID:\n  " + task + "\n\n");
          if (taskUrl != null) {
            sb.append("URL:\n  " + taskUrl + "\n");
          }

          for (ErrorAndSolution e : errors) {
            sb.append("\n");
            sb.append("Possible error:\n  " + e.getError() + "\n\n");
            sb.append("Solution:\n  " + e.getSolution() + "\n");
          }
          sb.append("-----\n");

          sb.append("Diagnostic Messages for this Task:\n");
          String[] diagMesgs = ti.getDiagnosticMesgs();
          for (String mesg : diagMesgs) {
            sb.append(mesg + "\n");
          }
          msg = sb.toString();
          console.printError(msg);
        }

        // Only print out one task because that's good enough for debugging.
        break;
      }
    }
    return msg;
  }
Example #4
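  // Post-execution hook: prints the query, its input and output entities, and any
  // column lineage information to the console.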
  public void run(
      SessionState sess,
      Set<ReadEntity> inputs,
      Set<WriteEntity> outputs,
      LineageInfo linfo,
      UserGroupInformation ugi)
      throws Exception {

    LogHelper console = SessionState.getConsole();

    if (console == null) {
      return;
    }

    if (sess != null) {
      console.printError("POSTHOOK: query: " + sess.getCmd().trim());
      console.printError("POSTHOOK: type: " + sess.getCommandType());
    }

    PreExecutePrinter.printEntities(console, inputs, "POSTHOOK: Input: ");
    PreExecutePrinter.printEntities(console, outputs, "POSTHOOK: Output: ");

    // Also print out the generic lineage information if there is any
    if (linfo != null) {
      LinkedList<Map.Entry<DependencyKey, Dependency>> entry_list =
          new LinkedList<Map.Entry<DependencyKey, Dependency>>(linfo.entrySet());
      Collections.sort(entry_list, new DependencyKeyComp());
      for (Map.Entry<DependencyKey, Dependency> it : entry_list) {
        Dependency dep = it.getValue();
        DependencyKey depK = it.getKey();

        if (dep == null) {
          continue;
        }

        StringBuilder sb = new StringBuilder();
        sb.append("POSTHOOK: Lineage: ");
        if (depK.getDataContainer().isPartition()) {
          Partition part = depK.getDataContainer().getPartition();
          sb.append(part.getTableName());
          sb.append(" PARTITION(");
          int i = 0;
          for (FieldSchema fs : depK.getDataContainer().getTable().getPartitionKeys()) {
            if (i != 0) {
              sb.append(",");
            }
            sb.append(fs.getName() + "=" + part.getValues().get(i++));
          }
          sb.append(")");
        } else {
          sb.append(depK.getDataContainer().getTable().getTableName());
        }
        sb.append("." + depK.getFieldSchema().getName() + " " + dep.getType() + " ");

        sb.append("[");
        for (BaseColumnInfo col : dep.getBaseCols()) {
          sb.append(
              "("
                  + col.getTabAlias().getTable().getTableName()
                  + ")"
                  + col.getTabAlias().getAlias()
                  + "."
                  + col.getColumn()
                  + ", ");
        }
        sb.append("]");

        console.printError(sb.toString());
      }
    }
  }
Example #5
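  // Dispatches a single CLI command: handles quit/exit, "!" shell commands, resource
  // listing, and "delete" validation locally, and hands everything else to a
  // CommandProcessor (running queries through Driver and printing their results).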
  public int processCmd(String cmd) throws TException, HiveException {
    SessionState ss = SessionState.get();
    ss.setiscli(true);

    String cmd_trimmed = cmd.trim();
    String[] tokens = cmd_trimmed.split("\\s+");
    String cmd_1 = cmd_trimmed.substring(tokens[0].length()).trim();
    int ret = 0;

    if (tokens[0].equalsIgnoreCase("delete")) {
      Vector<String> nexttoken = new Vector<String>();
      nexttoken.add("jar");
      nexttoken.add("file");
      nexttoken.add("from");
      if (tokens.length < 2 || !nexttoken.contains(tokens[1].toLowerCase())) {
        String errorMessage =
            "\nif delete resource:\n"
                + "Usage: delete [FILE|JAR] <value> [<value>]*\n"
                + "if delete table rows:\n"
                + "Usage: delete from tableName [where searchCondition]";
        console.printError(errorMessage);
        ret = 1;

        return ret;
      }
    }

    if (tokens[0].equalsIgnoreCase("dfs") || tokens[0].equalsIgnoreCase("zktest")) {
      String errorMessage = "\ntdw hive does not support " + tokens[0].toLowerCase() + " operation\n";
      throw new HiveException(errorMessage);
    }

    if (cmd_trimmed.toLowerCase().equals("quit") || cmd_trimmed.toLowerCase().equals("exit")) {

      System.exit(0);

    } else if (cmd_trimmed.startsWith("!")) {

      String shell_cmd = cmd_trimmed.substring(1);

      try {
        Process executor = Runtime.getRuntime().exec(shell_cmd);
        StreamPrinter outPrinter = new StreamPrinter(executor.getInputStream(), null, ss.out);
        StreamPrinter errPrinter = new StreamPrinter(executor.getErrorStream(), null, ss.err);

        outPrinter.start();
        errPrinter.start();

        ret = executor.waitFor();
        if (ret != 0) {
          console.printError("Command failed with exit code = " + ret);
        }
      } catch (Exception e) {
        console.printError(
            "Exception raised from Shell command " + e.getLocalizedMessage(),
            org.apache.hadoop.util.StringUtils.stringifyException(e));
        ret = 1;
      }

    } else if (tokens[0].toLowerCase().equals("list")) {

      SessionState.ResourceType t;
      if (tokens.length < 2 || (t = SessionState.find_resource_type(tokens[1])) == null) {
        console.printError(
            "Usage: list ["
                + StringUtils.join(SessionState.ResourceType.values(), "|")
                + "] [<value> [<value>]*]");
        ret = 1;
      } else {
        List<String> filter = null;
        if (tokens.length >= 3) {
          // Build the filter from the arguments after "list <type>" only; copying the
          // array avoids reusing the stale trailing entries of the original token array.
          filter = Arrays.asList(Arrays.copyOfRange(tokens, 2, tokens.length));
        }
        Set<String> s = ss.list_resource(t, filter);
        if (s != null && !s.isEmpty()) ss.out.println(StringUtils.join(s, "\n"));
      }

    } else {
      CommandProcessor proc = CommandProcessorFactory.get(tokens);
      if (proc != null) {
        if (proc instanceof Driver) {
          Driver qp = (Driver) proc;
          PrintStream out = ss.out;
          long start = System.currentTimeMillis();

          try {
            ret = qp.run(cmd);
          } catch (Exception e1) {
            e1.printStackTrace();
          }
          if (ret != 0) {
            qp.close();
            return ret;
          }

          Vector<String> res = new Vector<String>();
          try {
            while (qp.getResults(res)) {
              for (String r : res) {
                out.println(r);
              }
              res.clear();
              if (out.checkError()) {
                break;
              }
            }
          } catch (IOException e) {
            console.printError(
                "Failed with exception " + e.getClass().getName() + ":" + e.getMessage(),
                "\n" + org.apache.hadoop.util.StringUtils.stringifyException(e));
            ret = 1;
          }

          int cret = qp.close();
          if (ret == 0) {
            ret = cret;
          }

          long end = System.currentTimeMillis();
          if (end > start) {
            double timeTaken = (double) (end - start) / 1000.0;
            console.printInfo("Time taken: " + timeTaken + " seconds", null);
          }

        } else {
          try {
            ret = proc.run(cmd_1);
          } catch (Exception e) {
            e.printStackTrace();
          }
        }
      }
    }

    return ret;
  }
Example #6
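  // Standalone entry point for merging block files: parses -input/-outputDir/-jobconf
  // arguments, expands input directories into individual files, applies the extra job
  // conf settings, and runs a BlockMergeTask over the collected paths.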
  public static void main(String[] args) {

    ArrayList<String> jobConfArgs = new ArrayList<String>();

    String inputPathStr = null;
    String outputDir = null;

    try {
      for (int i = 0; i < args.length; i++) {
        if (args[i].equals("-input")) {
          inputPathStr = args[++i];
        } else if (args[i].equals("-jobconf")) {
          jobConfArgs.add(args[++i]);
        } else if (args[i].equals("-outputDir")) {
          outputDir = args[++i];
        }
      }
    } catch (IndexOutOfBoundsException e) {
      System.err.println("Missing argument to option");
      printUsage();
    }

    if (inputPathStr == null || outputDir == null || outputDir.trim().equals("")) {
      printUsage();
    }

    List<String> inputPaths = new ArrayList<String>();
    String[] paths = inputPathStr.split(INPUT_SEPERATOR);
    if (paths == null || paths.length == 0) {
      printUsage();
    }

    FileSystem fs = null;
    JobConf conf = new JobConf(BlockMergeTask.class);
    HiveConf hiveConf = new HiveConf(conf, BlockMergeTask.class);
    for (String path : paths) {
      try {
        Path pathObj = new Path(path);
        if (fs == null) {
          fs = FileSystem.get(pathObj.toUri(), conf);
        }
        FileStatus fstatus = fs.getFileStatus(pathObj);
        if (fstatus.isDir()) {
          FileStatus[] fileStatus = fs.listStatus(pathObj);
          for (FileStatus st : fileStatus) {
            inputPaths.add(st.getPath().toString());
          }
        } else {
          inputPaths.add(fstatus.getPath().toString());
        }
      } catch (IOException e) {
        e.printStackTrace(System.err);
      }
    }

    StringBuilder sb = new StringBuilder("JobConf:\n");

    for (String one : jobConfArgs) {
      int eqIndex = one.indexOf('=');
      if (eqIndex != -1) {
        try {
          String key = one.substring(0, eqIndex);
          String value = URLDecoder.decode(one.substring(eqIndex + 1), "UTF-8");
          conf.set(key, value);
          sb.append(key).append("=").append(value).append("\n");
        } catch (UnsupportedEncodingException e) {
          System.err.println(
              "Unexpected error "
                  + e.getMessage()
                  + " while decoding "
                  + one.substring(eqIndex + 1));
          System.exit(3);
        }
      }
    }

    Log LOG = LogFactory.getLog(BlockMergeTask.class.getName());
    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);
    LogHelper console = new LogHelper(LOG, isSilent);

    // Print out the location of the log file for the user so that it's easy to
    // find the reason for local-mode execution failures.
    for (Appender appender :
        Collections.list((Enumeration<Appender>) LogManager.getRootLogger().getAllAppenders())) {
      if (appender instanceof FileAppender) {
        console.printInfo("Execution log at: " + ((FileAppender) appender).getFile());
      }
    }

    // log the list of job conf parameters for reference
    LOG.info(sb.toString());

    MergeWork mergeWork = new MergeWork(inputPaths, outputDir);
    DriverContext driverCxt = new DriverContext();
    BlockMergeTask taskExec = new BlockMergeTask();
    taskExec.initialize(hiveConf, null, driverCxt);
    taskExec.setWork(mergeWork);
    int ret = taskExec.execute(driverCxt);

    if (ret != 0) {
      System.exit(2);
    }
  }
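
All six snippets report user-facing messages through Hive's SessionState.LogHelper (the console object): printError writes to the error stream and the log, while printInfo output to the console can be suppressed via the isSilent flag (HIVESESSIONSILENT in Examples #2 and #6). Below is a minimal sketch of that shared pattern, modeled on Example #2; the class name and messages are illustrative, and it assumes the Hive exec and SLF4J jars are on the classpath.

import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.session.SessionState.LogHelper;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LogHelperExample {

  private static final Logger LOG = LoggerFactory.getLogger(LogHelperExample.class);

  public static void main(String[] args) {
    // Same pattern as Example #2: derive the silent flag from the session configuration.
    HiveConf conf = new HiveConf();
    boolean isSilent = HiveConf.getBoolVar(conf, HiveConf.ConfVars.HIVESESSIONSILENT);

    LogHelper console = new LogHelper(LOG, isSilent);

    // printInfo is shown on the console only when the session is not silent.
    console.printInfo("Informational message (hidden on the console when silent)");
    // printError is always written to the error stream and the log.
    console.printError("Error message (always written to the error stream and the log)");
  }
}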