public void processEventForJobSummary(HistoryEvent event, JobSummary summary, JobId jobId) {
  // context.getJob could be used for some of this info as well.
  switch (event.getEventType()) {
    case JOB_SUBMITTED:
      JobSubmittedEvent jse = (JobSubmittedEvent) event;
      summary.setUser(jse.getUserName());
      summary.setQueue(jse.getJobQueueName());
      summary.setJobSubmitTime(jse.getSubmitTime());
      summary.setJobName(jse.getJobName());
      break;
    case NORMALIZED_RESOURCE:
      NormalizedResourceEvent normalizedResourceEvent = (NormalizedResourceEvent) event;
      if (normalizedResourceEvent.getTaskType() == TaskType.MAP) {
        summary.setResourcesPerMap(normalizedResourceEvent.getMemory());
      } else if (normalizedResourceEvent.getTaskType() == TaskType.REDUCE) {
        summary.setResourcesPerReduce(normalizedResourceEvent.getMemory());
      }
      break;
    case JOB_INITED:
      JobInitedEvent jie = (JobInitedEvent) event;
      summary.setJobLaunchTime(jie.getLaunchTime());
      break;
    case MAP_ATTEMPT_STARTED:
      TaskAttemptStartedEvent mtase = (TaskAttemptStartedEvent) event;
      if (summary.getFirstMapTaskLaunchTime() == 0) {
        summary.setFirstMapTaskLaunchTime(mtase.getStartTime());
      }
      break;
    case REDUCE_ATTEMPT_STARTED:
      TaskAttemptStartedEvent rtase = (TaskAttemptStartedEvent) event;
      if (summary.getFirstReduceTaskLaunchTime() == 0) {
        summary.setFirstReduceTaskLaunchTime(rtase.getStartTime());
      }
      break;
    case JOB_FINISHED:
      JobFinishedEvent jfe = (JobFinishedEvent) event;
      summary.setJobFinishTime(jfe.getFinishTime());
      summary.setNumFinishedMaps(jfe.getFinishedMaps());
      summary.setNumFailedMaps(jfe.getFailedMaps());
      summary.setNumFinishedReduces(jfe.getFinishedReduces());
      summary.setNumFailedReduces(jfe.getFailedReduces());
      if (summary.getJobStatus() == null) {
        summary.setJobStatus(
            org.apache.hadoop.mapreduce.JobStatus.State.SUCCEEDED.toString());
      }
      // TODO: JOB_FINISHED does not carry a state. Effectively, job history does not
      // have state about the finished job.
      setSummarySlotSeconds(summary, jfe.getTotalCounters());
      break;
    case JOB_FAILED:
    case JOB_KILLED:
      JobUnsuccessfulCompletionEvent juce = (JobUnsuccessfulCompletionEvent) event;
      summary.setJobStatus(juce.getStatus());
      summary.setNumFinishedMaps(context.getJob(jobId).getTotalMaps());
      summary.setNumFinishedReduces(context.getJob(jobId).getTotalReduces());
      summary.setJobFinishTime(juce.getFinishTime());
      setSummarySlotSeconds(summary, context.getJob(jobId).getAllCounters());
      break;
  }
}
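/*
 * Illustrative sketch (not part of the class above): one way the summary-building
 * switch in processEventForJobSummary could be driven from a parsed .jhist stream to
 * accumulate a JobSummary for a single job. EventReader and JobSummary are existing
 * Hadoop classes; the 'handler', 'fs', 'historyFile', and 'jobId' variables are assumed
 * to be in scope for the example, and the wiring shown is an assumption for
 * illustration rather than the handler's actual call path.
 */
void buildSummaryFromHistory() throws IOException {
  JobSummary summary = new JobSummary();
  // fs and historyFile are assumed inputs: the FileSystem and Path of the .jhist file.
  EventReader reader = new EventReader(fs, historyFile);
  try {
    HistoryEvent event;
    // getNextEvent() returns null once the history stream is exhausted.
    while ((event = reader.getNextEvent()) != null) {
      // 'handler' is an assumed JobHistoryEventHandler instance with an AppContext,
      // which the JOB_FAILED/JOB_KILLED branches need for context.getJob(jobId).
      handler.processEventForJobSummary(event, summary, jobId);
    }
  } finally {
    reader.close();
  }
  // The accumulated summary renders as the one-line record written to the summary log.
  System.out.println(summary.getJobSummaryString());
}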
/**
 * Describes the current status of a job. This is not intended to be a comprehensive
 * piece of data. For that, look at JobProfile.
 *
 * @deprecated Use {@link org.apache.hadoop.mapreduce.JobStatus} instead
 */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JobStatus extends org.apache.hadoop.mapreduce.JobStatus {

  public static final int RUNNING =
      org.apache.hadoop.mapreduce.JobStatus.State.RUNNING.getValue();
  public static final int SUCCEEDED =
      org.apache.hadoop.mapreduce.JobStatus.State.SUCCEEDED.getValue();
  public static final int FAILED =
      org.apache.hadoop.mapreduce.JobStatus.State.FAILED.getValue();
  public static final int PREP =
      org.apache.hadoop.mapreduce.JobStatus.State.PREP.getValue();
  public static final int KILLED =
      org.apache.hadoop.mapreduce.JobStatus.State.KILLED.getValue();

  private static final String UNKNOWN = "UNKNOWN";
  private static final String[] runStates =
      {UNKNOWN, "RUNNING", "SUCCEEDED", "FAILED", "PREP", "KILLED"};

  /**
   * Helper method to get a human-readable state of the job.
   *
   * @param state job state
   * @return human-readable state of the job
   */
  public static String getJobRunState(int state) {
    if (state < 1 || state >= runStates.length) {
      return UNKNOWN;
    }
    return runStates[state];
  }

  static org.apache.hadoop.mapreduce.JobStatus.State getEnum(int state) {
    switch (state) {
      case 1: return org.apache.hadoop.mapreduce.JobStatus.State.RUNNING;
      case 2: return org.apache.hadoop.mapreduce.JobStatus.State.SUCCEEDED;
      case 3: return org.apache.hadoop.mapreduce.JobStatus.State.FAILED;
      case 4: return org.apache.hadoop.mapreduce.JobStatus.State.PREP;
      case 5: return org.apache.hadoop.mapreduce.JobStatus.State.KILLED;
    }
    return null;
  }

  public JobStatus() {}

  /**
   * Create a job status object for a given jobid.
   *
   * @param jobid The jobid of the job
   * @param mapProgress The progress made on the maps
   * @param reduceProgress The progress made on the reduces
   * @param cleanupProgress The progress made on the cleanup
   * @param runState The current state of the job
   * @param user userid of the person who submitted the job.
   * @param jobName user-specified job name.
   * @param jobFile job configuration file.
   * @param trackingUrl link to the web-ui for details of the job.
   */
  public JobStatus(JobID jobid, float mapProgress, float reduceProgress,
      float cleanupProgress, int runState, String user, String jobName,
      String jobFile, String trackingUrl) {
    this(jobid, mapProgress, reduceProgress, cleanupProgress, runState,
        JobPriority.NORMAL, user, jobName, jobFile, trackingUrl);
  }

  /**
   * Create a job status object for a given jobid.
   *
   * @param jobid The jobid of the job
   * @param mapProgress The progress made on the maps
   * @param reduceProgress The progress made on the reduces
   * @param runState The current state of the job
   * @param user userid of the person who submitted the job.
   * @param jobName user-specified job name.
   * @param jobFile job configuration file.
   * @param trackingUrl link to the web-ui for details of the job.
   */
  public JobStatus(JobID jobid, float mapProgress, float reduceProgress,
      int runState, String user, String jobName, String jobFile,
      String trackingUrl) {
    this(jobid, mapProgress, reduceProgress, 0.0f, runState, user, jobName,
        jobFile, trackingUrl);
  }

  /**
   * Create a job status object for a given jobid.
   *
   * @param jobid The jobid of the job
   * @param mapProgress The progress made on the maps
   * @param reduceProgress The progress made on the reduces
   * @param cleanupProgress The progress made on the cleanup
   * @param runState The current state of the job
   * @param jp Priority of the job.
   * @param user userid of the person who submitted the job.
   * @param jobName user-specified job name.
   * @param jobFile job configuration file.
   * @param trackingUrl link to the web-ui for details of the job.
   */
  public JobStatus(JobID jobid, float mapProgress, float reduceProgress,
      float cleanupProgress, int runState, JobPriority jp, String user,
      String jobName, String jobFile, String trackingUrl) {
    this(jobid, 0.0f, mapProgress, reduceProgress, cleanupProgress, runState,
        jp, user, jobName, jobFile, trackingUrl);
  }

  /**
   * Create a job status object for a given jobid.
   *
   * @param jobid The jobid of the job
   * @param setupProgress The progress made on the setup
   * @param mapProgress The progress made on the maps
   * @param reduceProgress The progress made on the reduces
   * @param cleanupProgress The progress made on the cleanup
   * @param runState The current state of the job
   * @param jp Priority of the job.
   * @param user userid of the person who submitted the job.
   * @param jobName user-specified job name.
   * @param jobFile job configuration file.
   * @param trackingUrl link to the web-ui for details of the job.
   */
  public JobStatus(JobID jobid, float setupProgress, float mapProgress,
      float reduceProgress, float cleanupProgress, int runState, JobPriority jp,
      String user, String jobName, String jobFile, String trackingUrl) {
    super(jobid, setupProgress, mapProgress, reduceProgress, cleanupProgress,
        getEnum(runState),
        org.apache.hadoop.mapreduce.JobPriority.valueOf(jp.name()),
        user, jobName, jobFile, trackingUrl);
  }

  public static JobStatus downgrade(org.apache.hadoop.mapreduce.JobStatus stat) {
    JobStatus old = new JobStatus(JobID.downgrade(stat.getJobID()),
        stat.getSetupProgress(), stat.getMapProgress(), stat.getReduceProgress(),
        stat.getCleanupProgress(), stat.getState().getValue(),
        JobPriority.valueOf(stat.getPriority().name()), stat.getUsername(),
        stat.getJobName(), stat.getJobFile(), stat.getTrackingUrl());
    old.setStartTime(stat.getStartTime());
    old.setFinishTime(stat.getFinishTime());
    old.setSchedulingInfo(stat.getSchedulingInfo());
    old.setHistoryFile(stat.getHistoryFile());
    return old;
  }

  /** @deprecated use getJobID instead */
  @Deprecated
  public String getJobId() {
    return getJobID().toString();
  }

  /** @return The jobid of the Job */
  public JobID getJobID() {
    return JobID.downgrade(super.getJobID());
  }

  /**
   * Return the priority of the job.
   *
   * @return job priority
   */
  public synchronized JobPriority getJobPriority() {
    return JobPriority.valueOf(super.getPriority().name());
  }

  /**
   * Sets the map progress of this job.
   *
   * @param p The value of map progress to set to
   */
  protected synchronized void setMapProgress(float p) {
    super.setMapProgress(p);
  }

  /**
   * Sets the cleanup progress of this job.
   *
   * @param p The value of cleanup progress to set to
   */
  protected synchronized void setCleanupProgress(float p) {
    super.setCleanupProgress(p);
  }

  /**
   * Sets the setup progress of this job.
   *
   * @param p The value of setup progress to set to
   */
  protected synchronized void setSetupProgress(float p) {
    super.setSetupProgress(p);
  }

  /**
   * Sets the reduce progress of this job.
   *
   * @param p The value of reduce progress to set to
   */
  protected synchronized void setReduceProgress(float p) {
    super.setReduceProgress(p);
  }

  /**
   * Set the finish time of the job.
   *
   * @param finishTime The finishTime of the job
   */
  protected synchronized void setFinishTime(long finishTime) {
    super.setFinishTime(finishTime);
  }

  /** Set the job history file url for a completed job. */
  protected synchronized void setHistoryFile(String historyFile) {
    super.setHistoryFile(historyFile);
  }

  /** Set the link to the web-ui for details of the job. */
  protected synchronized void setTrackingUrl(String trackingUrl) {
    super.setTrackingUrl(trackingUrl);
  }

  /** Set the job retire flag to true. */
  protected synchronized void setRetired() {
    super.setRetired();
  }

  /** Change the current run state of the job. */
  protected synchronized void setRunState(int state) {
    super.setState(getEnum(state));
  }

  /** @return running state of the job */
  public synchronized int getRunState() {
    return super.getState().getValue();
  }

  /**
   * Set the start time of the job.
   *
   * @param startTime The startTime of the job
   */
  protected synchronized void setStartTime(long startTime) {
    super.setStartTime(startTime);
  }

  /**
   * Set the username of the job.
   *
   * @param userName The username of the job
   */
  protected synchronized void setUsername(String userName) {
    super.setUsername(userName);
  }

  /**
   * Used to set the scheduling information associated with a particular job.
   *
   * @param schedulingInfo Scheduling information of the job
   */
  protected synchronized void setSchedulingInfo(String schedulingInfo) {
    super.setSchedulingInfo(schedulingInfo);
  }

  protected synchronized void setJobACLs(Map<JobACL, AccessControlList> acls) {
    super.setJobACLs(acls);
  }

  /**
   * Set the priority of the job, defaulting to NORMAL.
   *
   * @param jp new job priority
   */
  public synchronized void setJobPriority(JobPriority jp) {
    super.setPriority(org.apache.hadoop.mapreduce.JobPriority.valueOf(jp.name()));
  }

  /** @return Percentage of progress in maps */
  public synchronized float mapProgress() {
    return super.getMapProgress();
  }

  /** @return Percentage of progress in cleanup */
  public synchronized float cleanupProgress() {
    return super.getCleanupProgress();
  }

  /** @return Percentage of progress in setup */
  public synchronized float setupProgress() {
    return super.getSetupProgress();
  }

  /** @return Percentage of progress in reduce */
  public synchronized float reduceProgress() {
    return super.getReduceProgress();
  }

  // A utility to convert new job run states to the old ones.
  static int getOldNewJobRunState(org.apache.hadoop.mapreduce.JobStatus.State state) {
    return state.getValue();
  }
}
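/*
 * Illustrative sketch (not part of the class above): how the deprecated integer run
 * states round-trip through the class. It only uses members shown above
 * (getJobRunState, getRunState, downgrade, and the int constants); the example class,
 * its main() wrapper, and the 'newStatus' variable are assumptions for illustration.
 * JobStatus here refers to org.apache.hadoop.mapred.JobStatus.
 */
public class JobStatusStatesExample {
  public static void main(String[] args) {
    // The int constants delegate to the new enum's values, so index 2 is "SUCCEEDED".
    System.out.println(JobStatus.getJobRunState(JobStatus.SUCCEEDED)); // prints SUCCEEDED
    // Out-of-range values fall back to the UNKNOWN sentinel.
    System.out.println(JobStatus.getJobRunState(0));                   // prints UNKNOWN

    // Old-style callers can keep comparing against the int constants after a downgrade,
    // e.g. (assuming 'newStatus' is an org.apache.hadoop.mapreduce.JobStatus):
    // JobStatus old = JobStatus.downgrade(newStatus);
    // if (old.getRunState() == JobStatus.RUNNING) { /* still running */ }
  }
}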