/**
 * Submit the job to the cluster and return immediately.
 *
 * @throws IOException
 * @throws InterruptedException
 * @throws ClassNotFoundException
 */
public void submit()
    throws IOException, InterruptedException, ClassNotFoundException {
  ensureState(JobState.DEFINE);
  setUseNewAPI();

  // Connect to the JobTracker and submit the job
  connect();
  info = jobClient.submitJobInternal(conf);
  state = JobState.RUNNING;
}

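// Usage sketch (illustrative only, not part of this class): submit() hands the
// job to the cluster and returns at once, leaving this Job in the RUNNING
// state; it does not wait for the job to finish. The Job construction and the
// "example-job" name below are assumptions for the example; waitForCompletion()
// is the blocking alternative.
//
//   Job job = new Job(new Configuration(), "example-job");
//   // ... configure mapper, reducer, input/output ...
//   job.submit();   // returns immediately
//   // the client is now free to do other work, or to poll the job with
//   // isComplete()/isSuccessful() as sketched later in this file.
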
/**
 * Sets the flag that will allow the JobTracker to cancel the HDFS delegation
 * tokens upon job completion. Defaults to true.
 */
public void setCancelDelegationTokenUponJobCompletion(boolean value) {
  ensureState(JobState.DEFINE);
  conf.setBoolean(JOB_CANCEL_DELEGATION_TOKEN, value);
}

/**
 * Gets the counters for this job.
 *
 * @return the counters for this job.
 * @throws IOException
 */
public Counters getCounters() throws IOException {
  ensureState(JobState.RUNNING);
  return new Counters(info.getCounters());
}

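// Example (a sketch, assuming the job is running or finished and that the
// tasks incremented a hypothetical "WORDS" counter in a user-defined
// "MyCounters" enum): counters are fetched from the cluster and read through
// the Counters API.
//
//   Counters counters = job.getCounters();
//   long words = counters.findCounter(MyCounters.WORDS).getValue();
//   System.out.println("words counted: " + words);
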
/**
 * Fail the indicated task attempt.
 *
 * @param taskId the id of the task attempt to be failed.
 * @throws IOException
 */
public void failTask(TaskAttemptID taskId) throws IOException {
  ensureState(JobState.RUNNING);
  // downgrade to the old-API id; the second argument marks the attempt as
  // failed rather than merely killed
  info.killTask(org.apache.hadoop.mapred.TaskAttemptID.downgrade(taskId), true);
}

/**
 * Get events indicating completion (success/failure) of component tasks.
 *
 * @param startFrom index to start fetching events from
 * @return an array of {@link TaskCompletionEvent}s
 * @throws IOException
 */
public TaskCompletionEvent[] getTaskCompletionEvents(int startFrom)
    throws IOException {
  ensureState(JobState.RUNNING);
  return info.getTaskCompletionEvents(startFrom);
}

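// Polling sketch (illustrative): events are returned in pages, so a caller
// typically keeps a running offset and advances it by the number of events
// each fetch returned. "job" is an assumed RUNNING Job instance.
//
//   int from = 0;
//   TaskCompletionEvent[] events = job.getTaskCompletionEvents(from);
//   while (events.length > 0) {
//     for (TaskCompletionEvent event : events) {
//       System.out.println(event);   // id and status of the finished attempt
//     }
//     from += events.length;         // advance past the events already seen
//     events = job.getTaskCompletionEvents(from);
//   }
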
/**
 * Kill the running job. Blocks until all job tasks have been killed as well.
 * If the job is no longer running, it simply returns.
 *
 * @throws IOException
 */
public void killJob() throws IOException {
  ensureState(JobState.RUNNING);
  info.killJob();
}

/**
 * Set the {@link Reducer} for the job.
 *
 * @param cls the <code>Reducer</code> to use
 * @throws IllegalStateException if the job is submitted
 */
public void setReducerClass(Class<? extends Reducer> cls)
    throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setClass(REDUCE_CLASS_ATTR, cls, Reducer.class);
}

/**
 * Set the current working directory for the default file system.
 *
 * @param dir the new current working directory.
 * @throws IOException
 * @throws IllegalStateException if the job is submitted
 */
public void setWorkingDirectory(Path dir) throws IOException {
  ensureState(JobState.DEFINE);
  conf.setWorkingDirectory(dir);
}

/**
 * Get the <i>progress</i> of the job's reduce-tasks, as a float between 0.0
 * and 1.0. When all reduce tasks have completed, the function returns 1.0.
 *
 * @return the progress of the job's reduce-tasks.
 * @throws IOException
 */
public float reduceProgress() throws IOException {
  ensureState(JobState.RUNNING);
  return info.reduceProgress();
}

/**
 * Get the URL where some job progress information will be displayed.
 *
 * @return the URL where some job progress information will be displayed.
 */
public String getTrackingURL() {
  ensureState(JobState.RUNNING);
  return info.getTrackingURL();
}

/**
 * Set the user-specified job name.
 *
 * @param name the job's new name.
 * @throws IllegalStateException if the job is submitted
 */
public void setJobName(String name) throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setJobName(name);
}

/**
 * Define the comparator that controls which keys are grouped together for a
 * single call to
 * {@link Reducer#reduce(Object, Iterable, org.apache.hadoop.mapreduce.Reducer.Context)}.
 *
 * @param cls the raw comparator to use
 * @throws IllegalStateException if the job is submitted
 */
public void setGroupingComparatorClass(Class<? extends RawComparator> cls)
    throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setOutputValueGroupingComparator(cls);
}

/**
 * Define the comparator that controls how the keys are sorted before they are
 * passed to the {@link Reducer}.
 *
 * @param cls the raw comparator
 * @throws IllegalStateException if the job is submitted
 */
public void setSortComparatorClass(Class<? extends RawComparator> cls)
    throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setOutputKeyComparatorClass(cls);
}

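// Sketch of the usual division of labour between the two comparators
// (illustrative; FullKeyComparator and FirstFieldGroupingComparator are
// hypothetical RawComparator implementations): the sort comparator orders the
// composite keys completely, while the grouping comparator compares only the
// leading field, so all values sharing that field reach a single reduce() call
// in fully sorted order, the classic "secondary sort" setup.
//
//   job.setSortComparatorClass(FullKeyComparator.class);
//   job.setGroupingComparatorClass(FirstFieldGroupingComparator.class);
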
/**
 * Set the value class for job outputs.
 *
 * @param theClass the value class for job outputs.
 * @throws IllegalStateException if the job is submitted
 */
public void setOutputValueClass(Class<?> theClass) throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setOutputValueClass(theClass);
}

/**
 * Set the {@link Partitioner} for the job.
 *
 * @param cls the <code>Partitioner</code> to use
 * @throws IllegalStateException if the job is submitted
 */
public void setPartitionerClass(Class<? extends Partitioner> cls)
    throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setClass(PARTITIONER_CLASS_ATTR, cls, Partitioner.class);
}

/**
 * Check if the job is finished or not. This is a non-blocking call.
 *
 * @return <code>true</code> if the job is complete, else <code>false</code>.
 * @throws IOException
 */
public boolean isComplete() throws IOException {
  ensureState(JobState.RUNNING);
  return info.isComplete();
}

/**
 * Set the number of reduce tasks for the job.
 *
 * @param tasks the number of reduce tasks
 * @throws IllegalStateException if the job is submitted
 */
public void setNumReduceTasks(int tasks) throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setNumReduceTasks(tasks);
}

/**
 * Check if the job completed successfully.
 *
 * @return <code>true</code> if the job succeeded, else <code>false</code>.
 * @throws IOException
 */
public boolean isSuccessful() throws IOException {
  ensureState(JobState.RUNNING);
  return info.isSuccessful();
}

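// Monitoring sketch (illustrative): isComplete() is non-blocking, so a simple
// client can poll it, report reduce-side progress, and check isSuccessful()
// once the job has finished. The 5-second interval is an arbitrary choice, and
// the enclosing method must declare or handle InterruptedException for the
// Thread.sleep call.
//
//   job.submit();
//   while (!job.isComplete()) {
//     System.out.printf("reduce progress: %.0f%%%n", job.reduceProgress() * 100);
//     Thread.sleep(5000);
//   }
//   System.out.println(job.isSuccessful() ? "job succeeded" : "job failed");
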
/**
 * Set the {@link InputFormat} for the job.
 *
 * @param cls the <code>InputFormat</code> to use
 * @throws IllegalStateException if the job is submitted
 */
public void setInputFormatClass(Class<? extends InputFormat> cls)
    throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setClass(INPUT_FORMAT_CLASS_ATTR, cls, InputFormat.class);
}

/**
 * Set the {@link Mapper} for the job.
 *
 * @param cls the <code>Mapper</code> to use
 * @throws IllegalStateException if the job is submitted
 */
public void setMapperClass(Class<? extends Mapper> cls)
    throws IllegalStateException {
  ensureState(JobState.DEFINE);
  conf.setClass(MAP_CLASS_ATTR, cls, Mapper.class);
}

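// Putting the setters together (a sketch of a typical driver): every setter
// below requires the job to still be in the DEFINE state, i.e. it must run
// before submit(). WordCountMapper and SumReducer are hypothetical user
// classes; TextInputFormat and HashPartitioner are the stock implementations
// from the org.apache.hadoop.mapreduce.lib packages.
//
//   Job job = new Job(new Configuration());
//   job.setJobName("word-count");
//   job.setInputFormatClass(TextInputFormat.class);
//   job.setMapperClass(WordCountMapper.class);
//   job.setReducerClass(SumReducer.class);
//   job.setPartitionerClass(HashPartitioner.class);
//   job.setOutputValueClass(IntWritable.class);
//   job.setNumReduceTasks(2);
//   job.submit();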