Code Example #1
  /**
   * Logs the job killed event. Closes the job history log file.
   *
   * @param timestamp time when the job kill was issued, in ms.
   * @param finishedMaps number of finished map tasks.
   * @param finishedReduces number of finished reduce tasks.
   * @param counters the counters from the job.
   */
  public void logKilled(long timestamp, int finishedMaps, int finishedReduces, Counters counters) {
    if (disableHistory) {
      return;
    }

    if (null != writers) {
      log(
          writers,
          RecordTypes.Job,
          new Keys[] {
            Keys.JOBID,
            Keys.FINISH_TIME,
            Keys.JOB_STATUS,
            Keys.FINISHED_MAPS,
            Keys.FINISHED_REDUCES,
            Keys.COUNTERS
          },
          new String[] {
            jobId.toString(),
            String.valueOf(timestamp),
            Values.KILLED.name(),
            String.valueOf(finishedMaps),
            String.valueOf(finishedReduces),
            counters.makeEscapedCompactString()
          },
          true);
      closeAndClear(writers);
    }
  }
Code Example #2
 private File localizeJob(JobID jobid) throws IOException {
   String user = UserGroupInformation.getCurrentUser().getShortUserName();
   new JobLocalizer(tt.getJobConf(), user, jobid.toString()).initializeJobLogDir();
   File jobUserlog = TaskLog.getJobDir(jobid);
   JobConf conf = new JobConf();
   // localize job log directory
   tt.saveLogDir(jobid, conf);
   assertTrue(jobUserlog + " directory was not created.", jobUserlog.exists());
   return jobUserlog;
 }
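
A possible way to call this helper from a test; the sketch below is not part of the original test class. The JobID values and the extra check are assumptions, and the helper itself already asserts that the job log directory was created.

  // Illustrative JUnit-style test using the localizeJob(...) helper above.
  // The JobID identifier and ID values are made up for the example.
  public void testJobLogDirIsLocalized() throws IOException {
    JobID jobid = new JobID("20240101000", 1);
    File jobUserlog = localizeJob(jobid);
    // Beyond the existence check inside the helper, verify the path is a directory.
    assertTrue(jobUserlog + " is not a directory.", jobUserlog.isDirectory());
  }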
Code Example #3
  /**
   * Logs the job finished event. Closes the job history file.
   *
   * @param finishTime finish time of the job, in ms.
   * @param finishedMaps number of map tasks that finished successfully.
   * @param finishedReduces number of reduce tasks that finished successfully.
   * @param failedMaps number of failed map tasks (includes killed).
   * @param failedReduces number of failed reduce tasks (includes killed).
   * @param killedMaps number of killed map tasks.
   * @param killedReduces number of killed reduce tasks.
   * @param mapCounters the aggregate counters from the map tasks.
   * @param reduceCounters the aggregate counters from the reduce tasks.
   * @param counters the counters from the job.
   */
  public void logFinished(
      long finishTime,
      int finishedMaps,
      int finishedReduces,
      int failedMaps,
      int failedReduces,
      int killedMaps,
      int killedReduces,
      Counters mapCounters,
      Counters reduceCounters,
      Counters counters) {
    if (disableHistory) {
      return;
    }

    if (null != writers) {
      log(
          writers,
          RecordTypes.Job,
          new Keys[] {
            Keys.JOBID,
            Keys.FINISH_TIME,
            Keys.JOB_STATUS,
            Keys.FINISHED_MAPS,
            Keys.FINISHED_REDUCES,
            Keys.FAILED_MAPS,
            Keys.FAILED_REDUCES,
            Keys.KILLED_MAPS,
            Keys.KILLED_REDUCES,
            Keys.MAP_COUNTERS,
            Keys.REDUCE_COUNTERS,
            Keys.COUNTERS
          },
          new String[] {
            jobId.toString(),
            Long.toString(finishTime),
            Values.SUCCESS.name(),
            String.valueOf(finishedMaps),
            String.valueOf(finishedReduces),
            String.valueOf(failedMaps),
            String.valueOf(failedReduces),
            String.valueOf(killedMaps),
            String.valueOf(killedReduces),
            mapCounters.makeEscapedCompactString(),
            reduceCounters.makeEscapedCompactString(),
            counters.makeEscapedCompactString()
          },
          true);

      closeAndClear(writers);
    }

    // NOTE: history cleaning stuff deleted from here. We should do that
    // somewhere else!
  }
Code Example #4
  /**
   * Logs the job's priority.
   *
   * @param jobid the job ID.
   * @param priority the job's priority.
   */
  public void logJobPriority(JobID jobid, JobPriority priority) {
    if (disableHistory) {
      return;
    }

    if (null != writers) {
      log(
          writers,
          RecordTypes.Job,
          new Keys[] {Keys.JOBID, Keys.JOB_PRIORITY},
          new String[] {jobId.toString(), priority.toString()});
    }
  }
Code Example #5
  /** Logs the job as running. */
  public void logStarted() {
    if (disableHistory) {
      return;
    }

    if (null != writers) {
      log(
          writers,
          RecordTypes.Job,
          new Keys[] {Keys.JOBID, Keys.JOB_STATUS},
          new String[] {jobId.toString(), Values.RUNNING.name()});
    }
  }
Code Example #6
  /**
   * Logs the launch time of the job, along with its total task counts.
   *
   * @param startTime start time of the job, in ms.
   * @param totalMaps total number of map tasks assigned by the jobtracker.
   * @param totalReduces total number of reduce tasks.
   */
  public void logInited(long startTime, int totalMaps, int totalReduces) {
    if (disableHistory) {
      return;
    }

    if (null != writers) {
      log(
          writers,
          RecordTypes.Job,
          new Keys[] {
            Keys.JOBID, Keys.LAUNCH_TIME, Keys.TOTAL_MAPS, Keys.TOTAL_REDUCES, Keys.JOB_STATUS
          },
          new String[] {
            jobId.toString(),
            String.valueOf(startTime),
            String.valueOf(totalMaps),
            String.valueOf(totalReduces),
            Values.PREP.name()
          });
    }
  }
Code Example #7
  @SuppressWarnings("deprecation")
  public SimulatorJobInProgress(
      JobID jobid, JobTracker jobtracker, JobConf default_conf, JobStory jobStory) {
    super(jobid, jobStory.getJobConf(), jobtracker);
    // jobSetupCleanupNeeded is set to false in the parent constructor,
    // even though its default is true

    restartCount = 0;
    jobSetupCleanupNeeded = false;

    this.memoryPerMap = conf.getMemoryForMapTask();
    this.memoryPerReduce = conf.getMemoryForReduceTask();
    this.maxTaskFailuresPerTracker = conf.getMaxTaskFailuresPerTracker();

    this.jobId = jobid;
    String url =
        "http://"
            + jobtracker.getJobTrackerMachine()
            + ":"
            + jobtracker.getInfoPort()
            + "/jobdetails.jsp?jobid="
            + jobid;
    this.jobtracker = jobtracker;
    this.conf = jobStory.getJobConf();
    this.priority = conf.getJobPriority();
    Path jobDir = jobtracker.getSystemDirectoryForJob(jobid);
    this.jobFile = new Path(jobDir, "job.xml");
    this.status =
        new JobStatus(jobid, 0.0f, 0.0f, 0.0f, 0.0f, JobStatus.PREP, priority, conf.getUser());
    this.profile =
        new JobProfile(
            jobStory.getUser(),
            jobid,
            this.jobFile.toString(),
            url,
            jobStory.getName(),
            conf.getQueueName());
    this.startTime = JobTracker.getClock().getTime();
    status.setStartTime(startTime);
    this.resourceEstimator = new ResourceEstimator(this);

    this.numMapTasks = jobStory.getNumberMaps();
    this.numReduceTasks = jobStory.getNumberReduces();
    this.taskCompletionEvents =
        new ArrayList<TaskCompletionEvent>(numMapTasks + numReduceTasks + 10);

    this.mapFailuresPercent = conf.getMaxMapTaskFailuresPercent();
    this.reduceFailuresPercent = conf.getMaxReduceTaskFailuresPercent();
    MetricsContext metricsContext = MetricsUtil.getContext("mapred");
    this.jobMetrics = MetricsUtil.createRecord(metricsContext, "job");
    this.jobMetrics.setTag("user", conf.getUser());
    this.jobMetrics.setTag("sessionId", conf.getSessionId());
    this.jobMetrics.setTag("jobName", conf.getJobName());
    this.jobMetrics.setTag("jobId", jobid.toString());

    this.maxLevel = jobtracker.getNumTaskCacheLevels();
    this.anyCacheLevel = this.maxLevel + 1;
    this.nonLocalMaps = new LinkedList<TaskInProgress>();
    this.nonLocalRunningMaps = new LinkedHashSet<TaskInProgress>();
    this.runningMapCache = new IdentityHashMap<Node, Set<TaskInProgress>>();
    this.nonRunningReduces = new LinkedList<TaskInProgress>();
    this.runningReduces = new LinkedHashSet<TaskInProgress>();
    this.slowTaskThreshold =
        Math.max(0.0f, conf.getFloat("mapred.speculative.execution.slowTaskThreshold", 1.0f));
    this.speculativeCap = conf.getFloat("mapred.speculative.execution.speculativeCap", 0.1f);
    this.slowNodeThreshold = conf.getFloat("mapred.speculative.execution.slowNodeThreshold", 1.0f);

    this.jobStory = jobStory;
    //    this.jobHistory = this.jobtracker.getJobHistory();
  }
Code Example #8
  /**
   * Logs the job submitted event to history. Creates a new history file for the job. If history
   * file creation fails, history is disabled for all subsequent events.
   *
   * @param jobConfPath path to the job conf XML file in HDFS.
   * @param submitTime time when the job tracker received the job.
   * @param jobTrackerId identifier of the job tracker that received the job.
   * @throws IOException
   */
  public void logSubmitted(String jobConfPath, long submitTime, String jobTrackerId)
      throws IOException {

    if (disableHistory) {
      return;
    }

    // create output stream for logging in hadoop.job.history.location
    int defaultBufferSize = logDirFs.getConf().getInt("io.file.buffer.size", 4096);

    try {
      FSDataOutputStream out = null;
      PrintWriter writer = null;

      // In case the old JT is still running, but we can't connect to it, we
      // should ensure that it won't write to our (new JT's) job history file.
      if (logDirFs.exists(logFile)) {
        LOG.info("Remove the old history file " + logFile);
        logDirFs.delete(logFile, true);
      }

      out =
          logDirFs.create(
              logFile,
              new FsPermission(HISTORY_FILE_PERMISSION),
              true,
              defaultBufferSize,
              logDirFs.getDefaultReplication(),
              jobHistoryBlockSize,
              null);

      writer = new PrintWriter(out);

      fileManager.addWriter(jobId, writer);

      // cache it ...
      fileManager.setHistoryFile(jobId, logFile);

      writers = fileManager.getWriters(jobId);
      if (null != writers) {
        log(
            writers,
            RecordTypes.Meta,
            new Keys[] {Keys.VERSION},
            new String[] {String.valueOf(JobHistory.VERSION)});
      }

      String jobName = getJobName();
      String user = getUserName();

      // add to writer as well
      log(
          writers,
          RecordTypes.Job,
          new Keys[] {
            Keys.JOBID, Keys.JOBNAME, Keys.USER, Keys.SUBMIT_TIME, Keys.JOBCONF, Keys.JOBTRACKERID
          },
          new String[] {
            jobId.toString(), jobName, user,
            String.valueOf(submitTime), jobConfPath, jobTrackerId
          });

    } catch (IOException e) {
      // Disable history if we have errors other than in the user log.
      disableHistory = true;
    }

    /* Storing the job conf on the log dir */
    Path jobFilePath = new Path(logDir, CoronaJobHistoryFilesManager.getConfFilename(jobId));
    fileManager.setConfFile(jobId, jobFilePath);
    FSDataOutputStream jobFileOut = null;
    try {
      if (!logDirFs.exists(jobFilePath)) {
        jobFileOut =
            logDirFs.create(
                jobFilePath,
                new FsPermission(HISTORY_FILE_PERMISSION),
                true,
                defaultBufferSize,
                logDirFs.getDefaultReplication(),
                logDirFs.getDefaultBlockSize(),
                null);
        conf.writeXml(jobFileOut);
        jobFileOut.close();
      }
    } catch (IOException ioe) {
      LOG.error("Failed to store job conf in the log dir", ioe);
    } finally {
      if (jobFileOut != null) {
        try {
          jobFileOut.close();
        } catch (IOException ie) {
          LOG.info(
              "Failed to close the job configuration file " + StringUtils.stringifyException(ie));
        }
      }
    }
  }
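
Taken together, the logging methods in examples #1, #3–#6, and #8 cover the history lifecycle of a single job. The sketch below, which is not part of the original source, shows the order in which they would typically be invoked; the CoronaJobHistory type name is only a guess based on the CoronaJobHistoryFilesManager reference above, and all literal values are illustrative.

  // A minimal sketch of the expected call order for the history methods shown above.
  // CoronaJobHistory is an assumed type name; every literal value is illustrative.
  void recordJobLifecycle(CoronaJobHistory history, JobID jobId, Counters mapCounters,
      Counters reduceCounters, Counters counters) throws IOException {
    long submitTime = System.currentTimeMillis();
    history.logSubmitted("/system/" + jobId + "/job.xml", submitTime, "jobtracker_host:50030");

    // After setup, record the launch time and task totals; the job is logged with status PREP.
    history.logInited(System.currentTimeMillis(), 10, 2);

    // Mark the job as RUNNING.
    history.logStarted();

    // Optionally record a priority change.
    history.logJobPriority(jobId, JobPriority.NORMAL);

    // Exactly one terminal call is made; both variants also close the history
    // writers via closeAndClear(writers).
    history.logFinished(System.currentTimeMillis(), 10, 2, 0, 0, 0, 0,
        mapCounters, reduceCounters, counters);
    // ... or, if the job was killed:
    // history.logKilled(System.currentTimeMillis(), 10, 2, counters);
  }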