Code example #1
 /** Configure a job given argv. */
 public static boolean parseArgs(String[] argv, JobConf job) throws IOException {
   if (argv.length < 1) {
     return 0 == printUsage();
   }
   for (int i = 0; i < argv.length; ++i) {
     if (argv.length == i + 1) {
       System.out.println("ERROR: Required parameter missing from " + argv[i]);
       return 0 == printUsage();
     }
     try {
       if ("-m".equals(argv[i])) {
         job.setNumMapTasks(Integer.parseInt(argv[++i]));
       } else if ("-r".equals(argv[i])) {
         job.setNumReduceTasks(Integer.parseInt(argv[++i]));
       } else if ("-inFormat".equals(argv[i])) {
         job.setInputFormat(Class.forName(argv[++i]).asSubclass(InputFormat.class));
       } else if ("-outFormat".equals(argv[i])) {
         job.setOutputFormat(Class.forName(argv[++i]).asSubclass(OutputFormat.class));
       } else if ("-outKey".equals(argv[i])) {
         job.setOutputKeyClass(Class.forName(argv[++i]).asSubclass(WritableComparable.class));
       } else if ("-outValue".equals(argv[i])) {
         job.setOutputValueClass(Class.forName(argv[++i]).asSubclass(Writable.class));
       } else if ("-keepmap".equals(argv[i])) {
         job.set(
             org.apache.hadoop.mapreduce.GenericMRLoadGenerator.MAP_PRESERVE_PERCENT, argv[++i]);
       } else if ("-keepred".equals(argv[i])) {
         job.set(
             org.apache.hadoop.mapreduce.GenericMRLoadGenerator.REDUCE_PRESERVE_PERCENT,
             argv[++i]);
       } else if ("-outdir".equals(argv[i])) {
         FileOutputFormat.setOutputPath(job, new Path(argv[++i]));
       } else if ("-indir".equals(argv[i])) {
         FileInputFormat.addInputPaths(job, argv[++i]);
       } else if ("-inFormatIndirect".equals(argv[i])) {
         job.setClass(
             org.apache.hadoop.mapreduce.GenericMRLoadGenerator.INDIRECT_INPUT_FORMAT,
             Class.forName(argv[++i]).asSubclass(InputFormat.class),
             InputFormat.class);
         job.setInputFormat(IndirectInputFormat.class);
       } else {
         System.out.println("Unexpected argument: " + argv[i]);
         return 0 == printUsage();
       }
     } catch (NumberFormatException except) {
       System.out.println("ERROR: Integer expected instead of " + argv[i]);
       return 0 == printUsage();
     } catch (Exception e) {
       throw (IOException) new IOException().initCause(e);
     }
   }
   return true;
 }
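
As a usage note, parseArgs() is the kind of helper a Tool-style driver invokes before submitting the job. A minimal sketch of such a driver, assuming printUsage() lives in the same class (the run() method below is illustrative, not part of the source above):

 // Hypothetical driver: parseArgs() already prints usage on failure, so the
 // caller only needs to turn the boolean into an exit code before submitting.
 public int run(String[] argv) throws IOException {
   JobConf job = new JobConf(getConf(), GenericMRLoadGenerator.class);
   if (!parseArgs(argv, job)) {
     return -1;
   }
   JobClient.runJob(job);
   return 0;
 }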
Code example #2
  public void testFailAbort() throws IOException {
    JobConf job = new JobConf();
    job.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");
    job.setClass("fs.faildel.impl", FakeFileSystem.class, FileSystem.class);
    setConfForFileOutputCommitter(job);
    JobContext jContext = new JobContextImpl(job, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(job, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();
    FileOutputFormat.setWorkOutputPath(job, committer.getTempTaskOutputPath(tContext));

    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);
    String file = "test.txt";

    // A reporter that does nothing
    Reporter reporter = Reporter.NULL;
    // write output
    FileSystem localFs = new FakeFileSystem();
    TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat<>();
    RecordWriter<?, ?> theRecordWriter =
        theOutputFormat.getRecordWriter(localFs, job, file, reporter);
    writeOutput(theRecordWriter, reporter);

    // do abort
    Throwable th = null;
    try {
      committer.abortTask(tContext);
    } catch (IOException ie) {
      th = ie;
    }
    assertNotNull(th);
    assertTrue(th instanceof IOException);
    assertTrue(th.getMessage().contains("fake delete failed"));
    File jobTmpDir = new File(new Path(outDir, FileOutputCommitter.TEMP_DIR_NAME).toString());
    File taskTmpDir = new File(jobTmpDir, "_" + taskID);
    File expectedFile = new File(taskTmpDir, file);
    assertTrue(expectedFile + " does not exist", expectedFile.exists());

    th = null;
    try {
      committer.abortJob(jContext, JobStatus.State.FAILED);
    } catch (IOException ie) {
      th = ie;
    }
    assertNotNull(th);
    assertTrue(th instanceof IOException);
    assertTrue(th.getMessage().contains("fake delete failed"));
    assertTrue("job temp dir does not exists", jobTmpDir.exists());
  }
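
These assertions only hold if every delete() against the faildel scheme fails. A minimal sketch of what the registered FakeFileSystem might look like, assuming RawLocalFileSystem as the base class and java.net.URI for the scheme (both are assumptions; only the "fake delete failed" message is pinned down by the test):

  // Sketch: a file system whose delete() always throws, so abortTask() and
  // abortJob() cannot clean up and must surface the IOException asserted above.
  static class FakeFileSystem extends RawLocalFileSystem {
    @Override
    public URI getUri() {
      return URI.create("faildel:///"); // matches fs.default.name in the test
    }

    @Override
    public boolean delete(Path p, boolean recursive) throws IOException {
      throw new IOException("fake delete failed");
    }
  }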
Code example #3
File: SimulatorEngine.java Project: ukulililixl/core
  /**
   * Creates the configuration for mumak simulation. This is kept modular mostly for testing
   * purposes, so that the standard configuration can be modified before passing it to the init()
   * function.
   *
   * @return the JobConf for the SimulatorJobTracker
   */
  JobConf createMumakConf() {
    JobConf jobConf = new JobConf(getConf());
    jobConf.setClass(
        "topology.node.switch.mapping.impl", StaticMapping.class, DNSToSwitchMapping.class);
    jobConf.set("fs.default.name", "file:///");
    jobConf.set("mapred.job.tracker", "localhost:8012");
    jobConf.setInt("mapred.jobtracker.job.history.block.size", 512);
    jobConf.setInt("mapred.jobtracker.job.history.buffer.size", 512);
    jobConf.setLong("mapred.tasktracker.expiry.interval", 5000);
    jobConf.setInt("mapred.reduce.copy.backoff", 4);
    jobConf.setLong("mapred.job.reuse.jvm.num.tasks", -1);
    jobConf.setUser("mumak");
    jobConf.set(
        "mapred.system.dir",
        jobConf.get("hadoop.log.dir", "/tmp/hadoop-" + jobConf.getUser()) + "/mapred/system");

    return jobConf;
  }
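
Per the Javadoc, the factory is kept separate so tests can adjust the standard configuration before initialization. A sketch of that call pattern, assuming an init(JobConf) entry point as the Javadoc suggests (the override shown is hypothetical):

  SimulatorEngine engine = new SimulatorEngine();
  JobConf conf = engine.createMumakConf();
  conf.setLong("mapred.tasktracker.expiry.interval", 10000); // test-only override
  engine.init(conf);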
Code example #4
File: TestMapProgress.java Project: imace/hops
  /**
   * Validates map phase progress after each record is processed by map task using custom task
   * reporter.
   */
  public void testMapProgress() throws Exception {
    JobConf job = new JobConf();
    fs = FileSystem.getLocal(job);
    Path rootDir = new Path(TEST_ROOT_DIR);
    createInputFile(rootDir);

    job.setNumReduceTasks(0);
    TaskAttemptID taskId = TaskAttemptID.forName("attempt_200907082313_0424_m_000000_0");
    job.setClass("mapreduce.job.outputformat.class", NullOutputFormat.class, OutputFormat.class);
    job.set(org.apache.hadoop.mapreduce.lib.input.FileInputFormat.INPUT_DIR, TEST_ROOT_DIR);
    jobId = taskId.getJobID();

    JobContext jContext = new JobContextImpl(job, jobId);
    InputFormat<?, ?> input = ReflectionUtils.newInstance(jContext.getInputFormatClass(), job);

    List<InputSplit> splits = input.getSplits(jContext);
    JobSplitWriter.createSplitFiles(
        new Path(TEST_ROOT_DIR), job, new Path(TEST_ROOT_DIR).getFileSystem(job), splits);
    TaskSplitMetaInfo[] splitMetaInfo =
        SplitMetaInfoReader.readSplitMetaInfo(jobId, fs, job, new Path(TEST_ROOT_DIR));
    job.setUseNewMapper(true); // use new api
    for (int i = 0; i < splitMetaInfo.length; i++) { // rawSplits.length is 1
      map =
          new TestMapTask(
              job.get(JTConfig.JT_SYSTEM_DIR, "/tmp/hadoop/mapred/system") + jobId + "job.xml",
              taskId,
              i,
              splitMetaInfo[i].getSplitIndex(),
              1);

      JobConf localConf = new JobConf(job);
      map.localizeConfiguration(localConf);
      map.setConf(localConf);
      map.run(localConf, fakeUmbilical);
    }
    // clean up
    fs.delete(rootDir, true);
  }
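
The per-record verification itself happens in the custom reporter the Javadoc mentions, not in this method. A minimal sketch of that check, assuming the input file created above holds three records and the reporter keeps a numRecords counter (both assumptions):

  // Sketch: after the i-th of three records, map progress should be i/3.
  @Override
  public void setProgress(float progress) {
    super.setProgress(progress);
    numRecords++;
    assertTrue(
        "Unexpected map progress", Math.abs(progress - ((float) numRecords / 3)) < 0.001);
  }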
Code example #5
 /** Create the job tracker and run it. */
 public void run() {
   try {
     jc = (jc == null) ? createJobConf() : createJobConf(jc);
     File f = new File("build/test/mapred/local").getAbsoluteFile();
     jc.set("mapred.local.dir", f.getAbsolutePath());
     jc.setClass(
         "topology.node.switch.mapping.impl", StaticMapping.class, DNSToSwitchMapping.class);
     final String id = new SimpleDateFormat("yyyyMMddHHmmssSSS").format(new Date());
     if (ugi == null) {
       ugi = UserGroupInformation.getCurrentUser();
     }
     tracker =
         ugi.doAs(
             new PrivilegedExceptionAction<JobTracker>() {
               public JobTracker run() throws InterruptedException, IOException {
                 return JobTracker.startTracker(jc, id);
               }
             });
     tracker.offerService();
   } catch (Throwable e) {
     LOG.error("Job tracker crashed", e);
     isActive = false;
   }
 }
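
Because run() blocks in offerService(), this runner is meant to live on its own thread, MiniMRCluster-style. A hedged sketch of the launch side (the class and variable names are illustrative):

 JobTrackerRunner runner = new JobTrackerRunner(conf);
 Thread jtThread = new Thread(runner, "JobTracker");
 jtThread.setDaemon(true);
 jtThread.start();
 // wait for the tracker to come up before submitting jobs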
Code example #6
  private void testFailAbortInternal(int version) throws IOException, InterruptedException {
    JobConf conf = new JobConf();
    conf.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");
    conf.setClass("fs.faildel.impl", FakeFileSystem.class, FileSystem.class);
    conf.set(JobContext.TASK_ATTEMPT_ID, attempt);
    conf.setInt(
        org.apache.hadoop.mapreduce.lib.output.FileOutputCommitter
            .FILEOUTPUTCOMMITTER_ALGORITHM_VERSION,
        version);
    conf.setInt(MRConstants.APPLICATION_ATTEMPT_ID, 1);
    FileOutputFormat.setOutputPath(conf, outDir);
    JobContext jContext = new JobContextImpl(conf, taskID.getJobID());
    TaskAttemptContext tContext = new TaskAttemptContextImpl(conf, taskID);
    FileOutputCommitter committer = new FileOutputCommitter();

    // do setup
    committer.setupJob(jContext);
    committer.setupTask(tContext);

    // write output
    File jobTmpDir =
        new File(
            new Path(
                    outDir,
                    FileOutputCommitter.TEMP_DIR_NAME
                        + Path.SEPARATOR
                        + conf.getInt(MRConstants.APPLICATION_ATTEMPT_ID, 0)
                        + Path.SEPARATOR
                        + FileOutputCommitter.TEMP_DIR_NAME)
                .toString());
    File taskTmpDir = new File(jobTmpDir, "_" + taskID);
    File expectedFile = new File(taskTmpDir, partFile);
    TextOutputFormat<?, ?> theOutputFormat = new TextOutputFormat<>();
    RecordWriter<?, ?> theRecordWriter =
        theOutputFormat.getRecordWriter(null, conf, expectedFile.getAbsolutePath(), null);
    writeOutput(theRecordWriter, tContext);

    // do abort
    Throwable th = null;
    try {
      committer.abortTask(tContext);
    } catch (IOException ie) {
      th = ie;
    }
    assertNotNull(th);
    assertTrue(th instanceof IOException);
    assertTrue(th.getMessage().contains("fake delete failed"));
    assertTrue(expectedFile + " does not exist", expectedFile.exists());

    th = null;
    try {
      committer.abortJob(jContext, JobStatus.State.FAILED);
    } catch (IOException ie) {
      th = ie;
    }
    assertNotNull(th);
    assertTrue(th instanceof IOException);
    assertTrue(th.getMessage().contains("fake delete failed"));
    assertTrue("job temp dir does not exists", jobTmpDir.exists());
    FileUtil.fullyDelete(new File(outDir.toString()));
  }
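
Since the helper takes the commit-algorithm version as a parameter, the public test methods presumably just fan out over both FileOutputCommitter algorithms. A sketch of such callers (the method names are assumptions):

  public void testFailAbortV1() throws IOException, InterruptedException {
    testFailAbortInternal(1);
  }

  public void testFailAbortV2() throws IOException, InterruptedException {
    testFailAbortInternal(2);
  }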