Example 1: the top-level run method, which chains the map, shuffle, cleanup, and reduce child jobs in a Pipeline and returns the reduce result
 public Value<MapReduceResult<R>> run(
     MapReduceSpecification<I, K, V, O, R> mrSpec, MapReduceSettings settings) {
   String mrJobId = getJobKey().getName();
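   // Map phase: run the mapper over the input and write intermediate files.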
   FutureValue<ResultAndCounters<List<AppEngineFile>>> mapResult =
       futureCall(new MapJob<I, K, V>(mrJobId, mrSpec, settings), Util.jobSettings(settings));
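   // Shuffle phase: group the intermediate map output by key.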
   FutureValue<ShuffleResult<K, V, O>> shuffleResult =
       futureCall(
           new ShuffleJob<K, V, O>(mrJobId, mrSpec, settings),
           mapResult,
           Util.jobSettings(settings));
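   // Once the shuffle has finished, delete the intermediate map output files.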
   futureCall(
       new IntermediateCleanupJob(mrJobId, settings),
       mapResult,
       Util.jobSettings(settings, waitFor(shuffleResult)));
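   // Reduce phase: consumes the map counters and the shuffled data.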
   FutureValue<MapReduceResult<R>> reduceResult =
       futureCall(
           new ReduceJob<K, V, O, R>(mrJobId, mrSpec, settings),
           mapResult,
           shuffleResult,
           Util.jobSettings(settings));
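   // Once the reduce has finished, delete the reducer input files left by the shuffle.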
   futureCall(
       new FinalCleanupJob<K, V, O>(mrJobId, settings),
       shuffleResult,
       Util.jobSettings(settings, waitFor(reduceResult)));
   return reduceResult;
 }
Example 2: intermediate cleanup job that deletes the files produced by the map phase
 @Override
 public Value<Void> run(ResultAndCounters<List<AppEngineFile>> mapResult) {
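   // Fire-and-forget deletion of the map output files; the job itself returns no value.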
   futureCall(
       new DeleteFilesJob("" + this),
       immediate(mapResult.getOutputResult()),
       Util.jobSettings(settings));
   return immediate(null);
 }
Example 3: final cleanup job that deletes the reducer input files produced by the shuffle
 @Override
 public Value<Void> run(ShuffleResult<K, V, O> shuffleResult) {
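   // Fire-and-forget deletion of the shuffle output once the reduce no longer needs it.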
   futureCall(
       new DeleteFilesJob("" + this),
       immediate(shuffleResult.getReducerInputFiles()),
       Util.jobSettings(settings));
   return immediate(null);
 }
Example 4: the map phase job, which starts a sharded worker job and returns a promised result that is filled in when the shards complete
 @Override
 public Value<ResultAndCounters<List<AppEngineFile>>> run() {
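   // Promise that the sharded job fills in once all map shards have completed.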
   @SuppressWarnings("unchecked")
   PromisedValue<ResultAndCounters<List<AppEngineFile>>> result =
       (PromisedValue) newPromise(ResultAndCounters.class);
   String shardedJobId = mrJobId + "-map";
   List<? extends InputReader<I>> readers = Util.createReaders(mrSpec.getInput());
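   // Intermediate output sized to the number of input readers, using the intermediate key/value marshallers.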
   Output<KeyValue<K, V>, List<AppEngineFile>> output =
       new IntermediateOutput<K, V>(
           mrJobId,
           readers.size(),
           mrSpec.getIntermediateKeyMarshaller(),
           mrSpec.getIntermediateValueMarshaller());
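   // Start the sharded map job; the promise handle lets it deliver the promised result.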
   startShardedJob(
       mrSpec.getJobName() + " (map phase)",
       shardedJobId,
       new CountersImpl(),
       readers,
       output,
       Util.createWriters(output),
       new TaskCreator<I, KeyValue<K, V>, MapperContext<K, V>>() {
         @Override
         public WorkerShardTask<I, KeyValue<K, V>, MapperContext<K, V>> createTask(
             int shard,
             int shardCount,
             InputReader<I> reader,
             OutputWriter<KeyValue<K, V>> writer) {
           return new MapShardTask<I, K, V>(
               mrJobId,
               shard,
               shardCount,
               reader,
               mrSpec.getMapper(),
               writer,
               settings.getMillisPerSlice());
         }
       },
       result.getHandle(),
       settings);
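   // Point the status console at this sharded job.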
   setStatusConsoleUrl(settings.getBaseUrl() + "detail?mapreduce_id=" + shardedJobId);
   return result;
 }
Example 5: static entry point that starts a MapReduceJob in a new Pipeline and returns the pipeline id
 /**
  * Starts a {@link MapReduceJob} with the given parameters in a new Pipeline. Returns the pipeline
  * id.
  */
 public static <I, K, V, O, R> String start(
     MapReduceSpecification<I, K, V, O, R> specification, MapReduceSettings settings) {
   PipelineService pipelineService = PipelineServiceFactory.newPipelineService();
   return pipelineService.startNewPipeline(
       new MapReduceJob<I, K, V, O, R>(), specification, settings, Util.jobSettings(settings));
 }
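For reference, a minimal sketch of how a caller might use this entry point and then look up the pipeline it started. The getJobInfo call and the JobInfo accessor below are assumptions about the App Engine Pipeline API that this code builds on; they are not taken from the examples above.

 // Sketch only (assumed caller code): start the MapReduce and log the pipeline state.
 public static <I, K, V, O, R> String startAndLog(
     MapReduceSpecification<I, K, V, O, R> specification, MapReduceSettings settings)
     throws Exception {
   String pipelineId = MapReduceJob.start(specification, settings);
   // getJobInfo/getJobState are assumed from the Pipeline API; the job will
   // normally still be running immediately after start.
   PipelineService pipelineService = PipelineServiceFactory.newPipelineService();
   JobInfo jobInfo = pipelineService.getJobInfo(pipelineId);
   System.out.println("Pipeline " + pipelineId + " is " + jobInfo.getJobState());
   return pipelineId;
 }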