Code example #1
File: TopNJob.java  Project: sdgdsffdsfff/grade
 @Override
 public int run(String[] strings) throws Exception {
   Configuration configuration = getConf();
    // Raise the minimum split size to 512 MB so fewer map tasks are created.
    configuration.setLong("mapred.min.split.size", 512 * 1024 * 1024L);
    // First job: count film program seed numbers; input comes from strings[0], output goes to strings[1].
    Job numJob = new Job(configuration, "calculate film program seed num job");
   Path[] paths = getPaths(strings[0].split(","));
   HadoopUtils.deleteIfExist(strings[1]);
   MapReduceUtils.initMapperJob(
       NumCountMapper.class, Text.class, Text.class, this.getClass(), numJob, paths);
   // TableMapReduceUtil.initTableReducerJob(strings[1], NumCountReducer.class, numJob);
   MapReduceUtils.initReducerJob(new Path(strings[1]), NumCountReducer.class, numJob);
    // Abort if the first job fails; the second job reads its output from strings[1].
    if (!numJob.waitForCompletion(true)) {
      return 1;
    }
    // Second job: aggregate program set counts from the first job's output into strings[2].
    Job programeSets = new Job(configuration, "calculate program set num job");
   HadoopUtils.deleteIfExist(strings[2]);
   MapReduceUtils.initMapperJob(
       NumProgramSetsMapper.class,
       Text.class,
       Text.class,
       this.getClass(),
       programeSets,
       new Path(strings[1]));
   programeSets.setCombinerClass(NumProgramSetCombiner.class);
   MapReduceUtils.initReducerJob(new Path(strings[2]), NumProgramSetsReducer.class, programeSets);
   return programeSets.waitForCompletion(true) ? 0 : 1;
   //        return 0;
 }
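
The MapReduceUtils helper used above is local to the sdgdsffdsfff/grade project and is not shown in this listing. Judging only from the call sites (mapper class, map output key/value classes, the job's jar class, the Job instance, and one or more input paths; then an output path, reducer class, and Job), it presumably wraps the standard Job setters roughly as follows. This is a hypothetical sketch inferred from those call sites, not the project's actual implementation.

import java.io.IOException;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public final class MapReduceUtils {

  // Hypothetical: configure the map side of a job (jar, mapper, map output types, input paths).
  public static void initMapperJob(
      Class<? extends Mapper> mapperClass,
      Class<?> mapOutputKeyClass,
      Class<?> mapOutputValueClass,
      Class<?> jarClass,
      Job job,
      Path... inputPaths)
      throws IOException {
    job.setJarByClass(jarClass);
    job.setMapperClass(mapperClass);
    job.setMapOutputKeyClass(mapOutputKeyClass);
    job.setMapOutputValueClass(mapOutputValueClass);
    FileInputFormat.setInputPaths(job, inputPaths);
  }

  // Hypothetical: configure the reduce side of a job (reducer class and output path).
  public static void initReducerJob(
      Path outputPath, Class<? extends Reducer> reducerClass, Job job) {
    job.setReducerClass(reducerClass);
    FileOutputFormat.setOutputPath(job, outputPath);
  }

  private MapReduceUtils() {}
}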
Code example #2
File: CorrelateJob.java  Project: sdgdsffdsfff/grade
 @Override
 public int run(String[] strings) throws Exception {
   Configuration configuration = getConf();
   HadoopUtils.deleteIfExist(strings[1]);
    // First job: build the user history data set from the input paths in strings[0].
    Job correlate = new Job(configuration, "icntv correlate job");
   MapReduceUtils.initMapperJob(
       UserHistoryMapper.class,
       Text.class,
       Text.class,
       this.getClass(),
       correlate,
       getPaths(strings[0].split(",")));
   MapReduceUtils.initReducerJob(new Path(strings[1]), UserHistoryReducer.class, correlate);
    // Stop if the user-history job fails before running the pattern-mining step.
    if (!correlate.waitForCompletion(true)) {
      return 1;
    }
    // Run parallel FP-Growth with the parameters loaded from strings[2].
    Parameters parameter = getParameter(strings[2]);
    HadoopUtils.deleteIfExist(parameter.get("output"));
    PFPGrowth.runPFPGrowth(parameter, configuration);
   String output = parameter.get("output") + "/frequentpatterns";
   long count =
       HadoopUtils.count(
           new Path(output),
            new PathFilter() {
              @Override
              public boolean accept(Path path) {
                // Count only reducer output files (part-r-*).
                return path.getName().matches("part-r-\\d*");
              }
            });
   if (count == 0) {
     return 1;
   }
   configuration.setLong("icntv.correlate.total.size", count);
    HadoopUtils.deleteIfExist(strings[3]);
    // Final job: turn the frequent-pattern output into correlation results written to strings[3].
    Job result = new Job(configuration, "correlate result calculate");
   MapReduceUtils.initMapperJob(
       CorrelateInputMapper.class,
       Text.class,
       Text.class,
       this.getClass(),
       result,
       new Path(output));
   result.setInputFormatClass(SequenceFileInputFormat.class);
   //        TableMapReduceUtil.initTableReducerJob("");
   MapReduceUtils.initReducerJob(new Path(strings[3]), CorrelateOutPutReducer.class, result);
    return result.waitForCompletion(true) ? 0 : 1;
 }
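
Both job classes override run(String[]) and call getConf(), which suggests they extend Configured and implement Tool; on that assumption, either one can be launched through ToolRunner with a small driver like the one below. The JobDriver class and the argument layout comments are hypothetical, inferred from how the arguments are indexed in the run() methods above; the sketch is not part of the listed project.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ToolRunner;

public class JobDriver {
  public static void main(String[] args) throws Exception {
    // ToolRunner parses generic options (-D, -files, ...) before handing the
    // remaining arguments to the job's run(String[]) method.
    // Assumed layout for TopNJob: <comma-separated inputs> <intermediate output> <final output>
    // CorrelateJob additionally expects a parameter source for the FP-Growth step
    // and a fourth path for the correlation results.
    int exitCode = ToolRunner.run(new Configuration(), new TopNJob(), args);
    System.exit(exitCode);
  }
}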