public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] remainArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

    if (remainArgs.length != 2) {
      System.err.println("Usage: wordcount <input> <output>");
      System.exit(1);
    }

    Job job = Job.getInstance(conf, "wordcount"); // Job.getInstance replaces the deprecated Job constructor
    job.setJarByClass(WordCount.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);

    job.setMapperClass(Map.class);
    job.setCombinerClass(Reduce.class);
    job.setReducerClass(Reduce.class);

    job.setNumReduceTasks(4);

    job.setInputFormatClass(TextInputFormat.class);
    job.setOutputFormatClass(TextOutputFormat.class);

    // Remove any pre-existing output directory so re-runs do not fail
    FileSystem.get(conf).delete(new Path(remainArgs[1]), true);

    FileInputFormat.setInputPaths(job, new Path(remainArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(remainArgs[1]));

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
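The driver above references Map and Reduce classes that are not shown. A minimal sketch of what they would look like for word counting, following the standard Hadoop WordCount pattern (the class names match the setMapperClass/setReducerClass calls above; the tokenization is an assumption):

  // Hypothetical Map/Reduce implementations for the WordCount driver above
  public static class Map extends Mapper<LongWritable, Text, Text, IntWritable> {
    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    @Override
    protected void map(LongWritable key, Text value, Context context)
        throws IOException, InterruptedException {
      // Emit (token, 1) for every whitespace-separated token in the line
      for (String token : value.toString().split("\\s+")) {
        word.set(token);
        context.write(word, ONE);
      }
    }
  }

  public static class Reduce extends Reducer<Text, IntWritable, Text, IntWritable> {
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
        throws IOException, InterruptedException {
      // Sum the per-token counts produced by the mappers (and the combiner)
      int sum = 0;
      for (IntWritable v : values) {
        sum += v.get();
      }
      context.write(key, new IntWritable(sum));
    }
  }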
Example #2
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
      System.err.println("Usage: test.icde12.HadoopJoin <in> <out>");
      System.exit(2);
    }
    Job job = Job.getInstance(conf, "hadoop join");
    job.setJarByClass(HadoopJoin.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);

    job.setPartitionerClass(ICDEPartitioner.class);

    //    WritableComparator.define(Text.class,new ICDEComparator());

    job.setSortComparatorClass(ICDEComparator.class);

    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(Text.class);
    job.setNumReduceTasks(8);
    FileInputFormat.addInputPath(job, new Path(otherArgs[0]));
    FileOutputFormat.setOutputPath(job, new Path(otherArgs[1]));
    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
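The custom ICDEPartitioner and ICDEComparator wired in above are project-specific and not shown. As a sketch of the shape such a partitioner takes (the "joinKey#tag" key layout is an assumption, not the project's actual scheme):

  // Hypothetical partitioner: route map output by the join-key prefix so that
  // all records sharing a join key land in the same reducer
  public static class ICDEPartitioner extends Partitioner<Text, Text> {
    @Override
    public int getPartition(Text key, Text value, int numPartitions) {
      String joinKey = key.toString().split("#")[0];
      return (joinKey.hashCode() & Integer.MAX_VALUE) % numPartitions;
    }
  }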
Example #3
  /** Sets up configuration based on params */
  private static boolean setup(Hashtable<String, String> curConf, Configuration argConf) {

    if (argConf.get("file") == null) {
      logger.fatal("Missing file parameter");
      System.exit(1);
    }

    if (argConf.get("hdfs_base_path") == null) {
      logger.fatal("Missing HDFS base path, check gestore-conf.xml");
      System.exit(1);
    }

    if (argConf.get("hdfs_temp_path") == null) {
      logger.fatal("Missing HDFS temp path, check gestore-conf.xml");
      System.exit(1);
    }

    if (argConf.get("local_temp_path") == null) {
      logger.fatal("Missing local temp path, check gestore-conf.xml");
      System.exit(1);
    }

    // Input parameters
    curConf.put("run_id", argConf.get("run", ""));
    curConf.put("task_id", argConf.get("task", ""));
    curConf.put("file_id", argConf.get("file"));
    curConf.put("local_path", argConf.get("path", ""));
    curConf.put("type", argConf.get("type", "l2r"));
    curConf.put("timestamp_start", argConf.get("timestamp_start", "1"));
    curConf.put(
        "timestamp_stop", argConf.get("timestamp_stop", Integer.toString(Integer.MAX_VALUE)));
    curConf.put("delimiter", argConf.get("regex", "ID=.*"));
    curConf.put("taxon", argConf.get("taxon", "all"));
    curConf.put("intermediate", argConf.get("full_run", "false"));
    curConf.put("quick_add", argConf.get("quick_add", "false"));
    Boolean full_run = curConf.get("intermediate").matches("(?i).*true.*");
    curConf.put("format", argConf.get("format", "unknown"));
    curConf.put("split", argConf.get("split", "1"));
    curConf.put("copy", argConf.get("copy", "true"));

    // Constants
    curConf.put("base_path", argConf.get("hdfs_base_path"));
    curConf.put("temp_path", argConf.get("hdfs_temp_path"));
    curConf.put("local_temp_path", argConf.get("local_temp_path"));
    curConf.put("db_name_files", argConf.get("hbase_file_table"));
    curConf.put("db_name_runs", argConf.get("hbase_run_table"));
    curConf.put("db_name_updates", argConf.get("hbase_db_update_table"));

    // Timestamps
    Date currentTime = new Date();
    Date endDate = new Date(Long.parseLong(curConf.get("timestamp_stop")));
    curConf.put("timestamp_real", Long.toString(currentTime.getTime()));

    return true;
  }
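The fatal-error branches above expect hdfs_base_path and friends to arrive via gestore-conf.xml. A hedged sketch of how a caller would load that file into the Configuration before invoking setup() (the call site is an assumption; only the resource name comes from the log messages):

    // Load site-specific settings from the classpath, then run setup()
    Configuration argConf = new Configuration();
    argConf.addResource("gestore-conf.xml"); // supplies hdfs_base_path, hdfs_temp_path, ...
    Hashtable<String, String> curConf = new Hashtable<String, String>();
    setup(curConf, argConf);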
Example #4
  public static void main(final String[] pArgs) throws Exception {
    TreasuryYieldMulti tym = new TreasuryYieldMulti();

    // Here is an example of how to use multiple collections as the input to
    // a Hadoop job, from within Java code directly.
    MultiCollectionSplitBuilder mcsb = new MultiCollectionSplitBuilder();
    mcsb.add(
            new MongoURI("mongodb://localhost:27017/mongo_hadoop.yield_historical.in"),
            (MongoURI) null, // authuri
            true, // notimeout
            (DBObject) null, // fields
            (DBObject) null, // sort
            (DBObject) null, // query
            false)
        .add(
            new MongoURI("mongodb://localhost:27017/mongo_hadoop.yield_historical.in"),
            (MongoURI) null, // authuri
            true, // notimeout
            (DBObject) null, // fields
            (DBObject) null, // sort
            new BasicDBObject("_id", new BasicDBObject("$gt", new Date(883440000000L))),
            false); // range query

    Configuration conf = new Configuration();
    conf.set(MultiMongoCollectionSplitter.MULTI_COLLECTION_CONF_KEY, mcsb.toJSON());
    // Configuration has no setSplitterClass(); register the splitter through
    // the mongo-hadoop helper class instead.
    MongoConfigUtil.setSplitterClass(conf, MultiMongoCollectionSplitter.class);

    System.exit(ToolRunner.run(conf, new TreasuryYieldXMLConfig(), pArgs));
  }
  @Override
  public int run(String[] args) throws Exception {
    JobConf conf = new JobConf(getConf(), getClass());
    conf.setJobName("UFO count");

    String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
    if (otherArgs.length != 2) {
      System.err.println("Usage: avro UFO counter <in> <out>");
      System.exit(2);
    }

    FileInputFormat.addInputPath(conf, new Path(otherArgs[0]));
    Path outputPath = new Path(otherArgs[1]);
    FileOutputFormat.setOutputPath(conf, outputPath);
    outputPath.getFileSystem(conf).delete(outputPath, true); // recursive delete; delete(Path) is deprecated
    Schema input_schema = new Schema.Parser().parse(getClass().getResourceAsStream("ufo.avsc"));
    AvroJob.setInputSchema(conf, input_schema);
    AvroJob.setMapOutputSchema(
        conf,
        Pair.getPairSchema(Schema.create(Schema.Type.STRING), Schema.create(Schema.Type.LONG)));

    AvroJob.setOutputSchema(conf, OUTPUT_SCHEMA);
    AvroJob.setMapperClass(conf, AvroRecordMapper.class);
    AvroJob.setReducerClass(conf, AvroRecordReducer.class);
    conf.setInputFormat(AvroInputFormat.class);
    JobClient.runJob(conf);

    return 0;
  }
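AvroRecordMapper and AvroRecordReducer are referenced but not shown. Under the old mapred Avro API used here, a mapper that fits the (STRING, LONG) map-output schema would look roughly like this sketch (the GenericRecord field name is an assumption about ufo.avsc):

  // Hypothetical mapper emitting one (string, 1L) pair per input record
  public static class AvroRecordMapper extends AvroMapper<GenericRecord, Pair<Utf8, Long>> {
    @Override
    public void map(GenericRecord record, AvroCollector<Pair<Utf8, Long>> collector,
        Reporter reporter) throws IOException {
      // "shape" is an assumed field of the ufo.avsc schema
      collector.collect(new Pair<Utf8, Long>(new Utf8(record.get("shape").toString()), 1L));
    }
  }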
Example #6
 public static void main(String[] args) {
   int retval = 0;
   try {
     retval = ToolRunner.run(new Configuration(), new HashtagCountTool(), args);
   } catch (Exception e) {
     System.out.println(e.getMessage());
     retval = 2;
   }
   System.exit(retval);
 }
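Several examples in this collection delegate straight to ToolRunner like this. For reference, the Tool being run only needs run() plus the Configurable methods that Configured supplies; a skeleton (HashtagCountTool's real job setup is not shown, so the body is a placeholder):

  // Hypothetical skeleton of a ToolRunner-compatible tool
  public class HashtagCountTool extends Configured implements Tool {
    @Override
    public int run(String[] args) throws Exception {
      Configuration conf = getConf(); // already populated by ToolRunner
      // ...build, configure, and submit the job here...
      return 0;
    }
  }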
  public static boolean stopIteration(Configuration conf) throws IOException {
    FileSystem fs = FileSystem.get(conf);
    Path preFile = new Path("preX/Result");
    Path curFile = new Path("curX/part-00000");

    if (!(fs.exists(preFile) && fs.exists(curFile))) {
      System.exit(1);
    }

    boolean stop = true;
    String line1, line2;
    FSDataInputStream in1 = fs.open(preFile);
    FSDataInputStream in2 = fs.open(curFile);
    InputStreamReader isr1 = new InputStreamReader(in1);
    InputStreamReader isr2 = new InputStreamReader(in2);
    BufferedReader br1 = new BufferedReader(isr1);
    BufferedReader br2 = new BufferedReader(isr2);

    while ((line1 = br1.readLine()) != null && (line2 = br2.readLine()) != null) {
      String[] str1 = line1.split("\\s+");
      String[] str2 = line2.split("\\s+");
      double preElem = Double.parseDouble(str1[1]);
      double curElem = Double.parseDouble(str2[1]);
      if (Math.abs(preElem - curElem) > eps) {
        stop = false;
        break;
      }
    }

    br1.close();
    br2.close();

    if (!stop) {
      // Not converged yet: replace the previous result with the current one
      fs.delete(preFile, true);
      if (!fs.rename(curFile, preFile)) {
        System.exit(1);
      }
    }
    return stop;
  }
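stopIteration() compares the previous and current results line by line against the eps threshold and, while they still differ, promotes curX to preX for the next pass. A sketch of the driver loop that would sit around it (runIteration is a hypothetical helper that runs one MapReduce pass writing curX/part-00000):

  // Hypothetical convergence loop around stopIteration()
  public static void iterate(Configuration conf) throws Exception {
    do {
      runIteration(conf); // one MapReduce pass producing curX/part-00000
    } while (!stopIteration(conf));
  }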
Example #8
  public static void main(String[] args) throws Exception {
    Job job = Job.getInstance();
    job.setJarByClass(Sort.class);
    job.setJobName("Sort");

    FileInputFormat.addInputPath(job, new Path("hdfs://localhost:9000/input/"));
    FileOutputFormat.setOutputPath(job, new Path("hdfs://localhost:9000/output/"));
    job.setMapperClass(Map.class);
    // job.setCombinerClass(Reduce.class);
    job.setReducerClass(Reduce.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(IntWritable.class);
    job.setNumReduceTasks(2);

    System.exit(job.waitForCompletion(true) ? 0 : 1);
  }
  public static void main(String[] args) throws Exception {
    log.info("Beginning job...");
    Configuration conf = new Configuration();
    String[] inputArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

    List<String> other_args = new ArrayList<String>();
    // Iterate over inputArgs, not args: GenericOptionsParser strips generic
    // Hadoop options, so inputArgs can be shorter than the raw args array.
    for (int i = 0; i < inputArgs.length; ++i) {
      if ("-setup".equals(inputArgs[i])) {
        DistributedCache.addCacheFile(new Path(inputArgs[++i]).toUri(), conf);
        conf.setBoolean("minibatch.job.setup", true);
      } else {
        other_args.add(inputArgs[i]);
      }
    }
    String[] tool_args = other_args.toArray(new String[0]);
    int result = ToolRunner.run(conf, new BinarySigmoidRBM(), tool_args);
    System.exit(result);
  }
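The -setup flag above ships a file to the tasks through the DistributedCache and flips minibatch.job.setup. On the task side, a mapper would typically pick the file up in setup(), roughly like this sketch (SetupAwareMapper and its generic types are assumptions):

  // Hypothetical mapper-side counterpart to the -setup handling above
  public static class SetupAwareMapper extends Mapper<LongWritable, Text, Text, Text> {
    private Path setupFile;

    @Override
    protected void setup(Context context) throws IOException, InterruptedException {
      Configuration conf = context.getConfiguration();
      if (conf.getBoolean("minibatch.job.setup", false)) {
        // Local copies of the files registered via DistributedCache.addCacheFile()
        Path[] cached = DistributedCache.getLocalCacheFiles(conf);
        setupFile = cached[0];
      }
    }
  }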
Example #10
  public int run(String[] args) throws Exception {
    Path tempDir = new Path("/user/akhfa/temp");

    Configuration conf = new Configuration();
    Job job = Job.getInstance(conf, "word count");
    job.setJarByClass(AuthorCounter.class);
    job.setMapperClass(TokenizerMapper.class);
    job.setCombinerClass(IntSumReducer.class);
    job.setReducerClass(IntSumReducer.class);
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(LongWritable.class);
    job.setOutputFormatClass(SequenceFileOutputFormat.class);

    FileInputFormat.addInputPath(job, new Path(args[0]));
    FileOutputFormat.setOutputPath(job, tempDir);
    // Return the status so the caller (e.g. ToolRunner) decides the process
    // exit code; calling System.exit() here left the return unreachable.
    return job.waitForCompletion(true) ? 0 : 1;
  }
 public static void main(String[] args) throws Exception {
   int res = ToolRunner.run(new Configuration(), new MapTestID(), args);
   System.exit(res);
 }
Example #12
 public static void main(String[] args) throws Exception {
   int res = ToolRunner.run(new Configuration(), new XiangLi1_exercise3(), args);
   System.exit(res);
 }
 public static void main(String[] args) throws Exception {
   int exitCode = ToolRunner.run(new PartitionByStationUsingMultipleOutputs(), args);
   System.exit(exitCode);
 }
Example #14
 public static void main(String[] args) throws Exception {
   int res = ToolRunner.run(NutchConfiguration.create(), new Injector(), args);
   System.exit(res);
 }
  public static void main(String[] args) throws Exception {
    System.out.println("Beginning job...");
    // Start phase 1
    Configuration conf = new Configuration();

    conf.set("mapred.job.tracker", "local");
    conf.set("fs.default.name", "local");

    String[] inputArgs = new GenericOptionsParser(conf, args).getRemainingArgs();

    Path xmlPath = null;
    List<String> other_args = new ArrayList<String>();
      // Index inputArgs by its own length; GenericOptionsParser may have
      // stripped options, leaving inputArgs shorter than args.
      for (int i = 0; i < inputArgs.length; ++i) {
      if ("-setup".equals(inputArgs[i])) {
        xmlPath = new Path(inputArgs[++i]);
        DistributedCache.addCacheFile(xmlPath.toUri(), conf);
        conf.setBoolean("minibatch.job.setup", true);

      } else {
        other_args.add(inputArgs[i]);
      }
    }

    String[] tool_args = other_args.toArray(new String[0]);
    int result = ToolRunner.run(conf, new BatchGenerationEngine(), tool_args);
    // End phase 1

    // get example count and size from xml file
    // count = count_size[0];
    // size = count_size[1];
    int[] count_size = parseJobSetup(xmlPath); // assumes -setup was provided, so xmlPath != null

    // distribute those output from phase 1 into different directories
    String outputPhase1 = tool_args[1];
    FileSystem fs = FileSystem.get(new Configuration());
    Path outputPhase1Path = new Path(outputPhase1);
    fs.setWorkingDirectory(outputPhase1Path);
    FileStatus[] outputP1AllFiles = fs.listStatus(outputPhase1Path);
    for (int i = 0; i < outputP1AllFiles.length; i++) {
      int batchNum = i / count_size[1];
      Path batchPath = new Path(outputPhase1 + "/batch" + batchNum);

      // Create the batch directory if it does not exist yet
      if (!fs.exists(batchPath)) FileSystem.mkdirs(fs, batchPath, new FsPermission("777"));
      // move file into the batch# directory
      fs.rename(
          outputP1AllFiles[i].getPath(),
          new Path(
              outputPhase1 + "/batch" + batchNum + "/" + outputP1AllFiles[i].getPath().getName()));
    }
    //

    // Generate dictionary of jobs
    int numberOfJobs = count_size[0] * count_size[1];
    JobConfig[] dictionary = new JobConfig[numberOfJobs];

    // Add job 0 to dictionary
    Configuration conf0 = new Configuration();
    DistributedCache.addCacheFile(xmlPath.toUri(), conf0);
    JobConfig job0 = new JobConfig(conf0, "input go here", java.util.UUID.randomUUID().toString());
    dictionary[0] = job0;

    // Add the rest of jobs into dictionary
    for (int i = 1; i < dictionary.length; i++) {
      Configuration newConf = new Configuration();
      DistributedCache.addCacheFile(xmlPath.toUri(), newConf);
      JobConfig newJob =
          new JobConfig(newConf, dictionary[i - 1].args[1], java.util.UUID.randomUUID().toString());
      dictionary[i] = newJob;
    }

    // running the jobs
    logger.info("Start " + dictionary.length + " jobs!");
    for (int i = 0; i < dictionary.length; i++) {
      int runResult =
          ToolRunner.run(dictionary[i].conf, new BatchGenerationEngine(), dictionary[i].args);
      if (runResult == 1) {
        logger.info("Job " + i + ": re-running (first retry)");
        dictionary[i].args[1] = java.util.UUID.randomUUID().toString();
        runResult =
            ToolRunner.run(dictionary[i].conf, new BatchGenerationEngine(), dictionary[i].args);
      }
      if (runResult == 1) {
        logger.info("Job " + i + ": re-running (second retry)");
        dictionary[i].args[1] = java.util.UUID.randomUUID().toString();
        runResult =
            ToolRunner.run(dictionary[i].conf, new BatchGenerationEngine(), dictionary[i].args);
      }
      if (runResult == 1) {
        logger.info("Job " + i + " failed after two retries!");
        System.exit(1);
      } else {
        // Chain the pipeline: this job's output path is the next job's input
        if (i + 1 < dictionary.length) dictionary[i + 1].args[0] = dictionary[i].args[1];
      }
    }

    System.exit(0);
  }
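JobConfig is used above as a (Configuration, args) holder whose args[0] and args[1] are the input and output paths. It is not defined in this snippet; a minimal sketch consistent with that usage (not the project's actual class):

  // Hypothetical holder: dictionary[i].conf plus args[0]=input, args[1]=output
  public static class JobConfig {
    Configuration conf;
    String[] args;

    JobConfig(Configuration conf, String input, String output) {
      this.conf = conf;
      this.args = new String[] {input, output};
    }
  }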
 public static void main(String[] args) throws Exception {
   int exitCode = ToolRunner.run(new MaxTemperatureDriver(), args);
   System.exit(exitCode);
 }
Example #17
 public static void main(String[] args) throws Exception {
   int rc = ToolRunner.run(new Configuration(), new KnownKeysMRv2(), args);
   System.exit(rc);
 }
Example #18
 public static void main(String[] args) throws Exception {
   int result = ToolRunner.run(new HadoopNBFilter(), args);
   System.exit(result);
 }
 public static void main(String[] args) throws Exception {
   int ret = ToolRunner.run(new ElimiateRepeat(), args);
   System.exit(ret);
 }
Example #20
  public int run(String[] args) throws Exception {
    // printUsage();
    /*
     * SETUP
     */
    Configuration argConf = getConf();
    Hashtable<String, String> confArg = new Hashtable<String, String>();
    setup(confArg, argConf);
    Date currentTime = new Date();
    Date endDate = new Date(Long.parseLong(confArg.get("timestamp_stop")));
    Boolean full_run = confArg.get("intermediate").matches("(?i).*true.*");
    Boolean quick_add = confArg.get("quick_add").matches("(?i).*true.*");
    logger.info("Running GeStore");

    // ZooKeeper setup
    Configuration config = HBaseConfiguration.create();
    zkWatcher = new ZooKeeperWatcher(config, "Testing", new HBaseAdmin(config));
    zkInstance =
        new ZooKeeper(
            ZKConfig.getZKQuorumServersString(config),
            config.getInt("zookeeper.session.timeout", -1),
            zkWatcher);

    if (!confArg.get("task_id").isEmpty()) {
      confArg.put("temp_path", confArg.get("temp_path") + confArg.get("task_id"));
    }

    String lockRequest = confArg.get("file_id");
    if (!confArg.get("run_id").isEmpty())
      lockRequest = lockRequest + "_" + confArg.get("run_id") + "_";
    if (!confArg.get("task_id").isEmpty())
      lockRequest = lockRequest + "_" + confArg.get("task_id") + "_";

    // Get type of movement
    toFrom type_move = checkArgs(confArg);
    if (type_move == toFrom.LOCAL2REMOTE && !confArg.get("format").equals("unknown")) {
      List<String> arguments = new ArrayList<String>();
      arguments.add("-Dinput=" + confArg.get("local_path"));
      arguments.add("-Dtable=" + confArg.get("file_id"));
      arguments.add("-Dtimestamp=" + confArg.get("timestamp_stop"));
      arguments.add("-Dtype=" + confArg.get("format"));
      arguments.add("-Dtarget_dir=" + confArg.get("base_path") + "_" + confArg.get("file_id"));
      arguments.add("-Dtemp_hdfs_path=" + confArg.get("temp_path"));
      arguments.add("-Drun_id=" + confArg.get("run_id"));
      if (!confArg.get("run_id").isEmpty()) arguments.add("-Drun_id=" + confArg.get("run_id"));
      if (!confArg.get("task_id").isEmpty()) arguments.add("-Dtask_id=" + confArg.get("task_id"));
      if (quick_add) arguments.add("-Dquick_add=" + confArg.get("quick_add"));
      String lockName = lock(lockRequest);
      String[] argumentString = arguments.toArray(new String[arguments.size()]);
      adddb.main(argumentString);
      unlock(lockName);
      System.exit(0);
    }

    // Database registration

    dbutil db_util = new dbutil(config);
    db_util.register_database(confArg.get("db_name_files"), true);
    db_util.register_database(confArg.get("db_name_runs"), true);
    db_util.register_database(confArg.get("db_name_updates"), true);
    FileSystem hdfs = FileSystem.get(config);
    FileSystem localFS = FileSystem.getLocal(config);

    // Get source type
    confArg.put("source", getSource(db_util, confArg.get("db_name_files"), confArg.get("file_id")));
    confArg.put(
        "database", isDatabase(db_util, confArg.get("db_name_files"), confArg.get("file_id")));
    if (!confArg.get("source").equals("local")
        && type_move == toFrom.REMOTE2LOCAL
        && !confArg.get("timestamp_stop").equals(Integer.toString(Integer.MAX_VALUE))) {
      confArg.put("timestamp_stop", Long.toString(latestVersion(confArg, db_util)));
    }

    /*
     * Get previous timestamp
     */
    Get run_id_get = new Get(confArg.get("run_id").getBytes());
    Result run_get = db_util.doGet(confArg.get("db_name_runs"), run_id_get);
    KeyValue run_file_prev =
        run_get.getColumnLatest(
            "d".getBytes(), (confArg.get("file_id") + "_db_timestamp").getBytes());
    String last_timestamp = "0";
    if (null != run_file_prev && !confArg.get("source").equals("local")) {
      long last_timestamp_real = run_file_prev.getTimestamp();
      long current_timestamp = Long.parseLong(confArg.get("timestamp_real"));
      if ((current_timestamp - last_timestamp_real) > 36000) {
        last_timestamp = new String(run_file_prev.getValue());
        int lastTimestamp = Integer.parseInt(last_timestamp) + 1;
        last_timestamp = Integer.toString(lastTimestamp);
        logger.info("Last timestamp: " + last_timestamp + " End date: " + endDate);
        Date last_run = new Date(run_file_prev.getTimestamp());
        if (last_run.before(endDate) && !full_run) {
          confArg.put("timestamp_start", last_timestamp);
        }
      }
    }

    int tse = Integer.parseInt(confArg.get("timestamp_stop"));
    int tss = Integer.parseInt(confArg.get("timestamp_start"));
    if (tss > tse) {
      logger.info("No new version of requested file.");
      return 0;
    }

    /*
     * Generate file
     */

    String lockName = lock(lockRequest);

    Get file_id_get = new Get(confArg.get("file_id").getBytes());
    Result file_get = db_util.doGet(confArg.get("db_name_files"), file_id_get);
    if (!file_get.isEmpty()) {
      boolean found =
          hasFile(
              db_util,
              hdfs,
              confArg.get("db_name_files"),
              confArg.get("file_id"),
              getFullPath(confArg));
      if (confArg.get("source").equals("fullfile")) {
        found = false;
      }
      String filenames_put =
          getFileNames(
              db_util, confArg.get("db_name_files"), confArg.get("file_id"), getFullPath(confArg));
      // Filename not found in file database
      if (!found && type_move == toFrom.REMOTE2LOCAL) {
        if (!confArg.get("source").equals("local")) {
          // Generate intermediate file
          if (getFile(hdfs, confArg, db_util) == null) {
            unlock(lockName);
            return 1;
          }
          // Put generated file into file database
          if (!confArg.get("format").equals("fullfile")) {
            putFileEntry(
                db_util,
                hdfs,
                confArg.get("db_name_files"),
                confArg.get("file_id"),
                confArg.get("full_file_name"),
                confArg.get("source"));
          }
        } else {
          logger.warn("Remote file not found, and cannot be generated! File: " + confArg);
          unlock(lockName);
          return 1;
        }
      }
    } else {
      if (type_move == toFrom.REMOTE2LOCAL) {
        logger.warn("Remote file not found, and cannot be generated.");
        unlock(lockName);
        return 1;
      }
    }

    /*
     * Copy file
     * Update tables
     */

    if (type_move == toFrom.LOCAL2REMOTE) {
      if (!confArg.get("format").equals("fullfile")) {
        putFileEntry(
            db_util,
            hdfs,
            confArg.get("db_name_files"),
            confArg.get("file_id"),
            getFullPath(confArg),
            confArg.get("source"));
      }
      putRunEntry(
          db_util,
          confArg.get("db_name_runs"),
          confArg.get("run_id"),
          confArg.get("file_id"),
          confArg.get("type"),
          confArg.get("timestamp_real"),
          confArg.get("timestamp_stop"),
          getFullPath(confArg),
          confArg.get("delimiter"));
      hdfs.copyFromLocalFile(new Path(confArg.get("local_path")), new Path(getFullPath(confArg)));
    } else if (type_move == toFrom.REMOTE2LOCAL) {
      FileStatus[] files = hdfs.globStatus(new Path(getFullPath(confArg) + "*"));
      putRunEntry(
          db_util,
          confArg.get("db_name_runs"),
          confArg.get("run_id"),
          confArg.get("file_id"),
          confArg.get("type"),
          confArg.get("timestamp_real"),
          confArg.get("timestamp_stop"),
          getFullPath(confArg),
          confArg.get("delimiter"));
      unlock(lockName);
      for (FileStatus file : files) {
        Path cur_file = file.getPath();
        Path cur_local_path = new Path(confArg.get("local_path") + confArg.get("file_id"));
        String suffix = getSuffix(getFileName(confArg), cur_file.getName());
        if (suffix.length() > 0) {
          cur_local_path = cur_local_path.suffix("." + suffix);
        }
        if (confArg.get("copy").equals("true")) {
          String crc = hdfs.getFileChecksum(cur_file).toString();
          if (checksumLocalTest(cur_local_path, crc)) {
            continue;
          } else {
            hdfs.copyToLocalFile(cur_file, cur_local_path);
            writeChecksum(cur_local_path, crc);
          }
        } else {
          System.out.println(cur_local_path + "\t" + cur_file);
        }
      }
    }
    unlock(lockName);
    return 0;
  }
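The lock()/unlock() helpers that guard file generation above are not shown. A minimal sketch of the idea using the zkInstance handle created earlier, with an ephemeral znode per lock name (the znode namespace and polling retry are assumptions):

  // Hypothetical ZooKeeper-based mutual exclusion used around file generation
  private static String lock(String lockRequest) throws Exception {
    String path = "/gestore-locks/" + lockRequest; // assumed namespace
    while (true) {
      try {
        // Ephemeral node: released automatically if this client dies
        return zkInstance.create(
            path, new byte[0], ZooDefs.Ids.OPEN_ACL_UNSAFE, CreateMode.EPHEMERAL);
      } catch (KeeperException.NodeExistsException held) {
        Thread.sleep(1000); // lock is taken; poll until the holder releases it
      }
    }
  }

  private static void unlock(String lockName) throws Exception {
    zkInstance.delete(lockName, -1); // version -1 matches any version
  }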
Example #21
 public static void main(String[] args) throws Exception {
   int result = ToolRunner.run(new Configuration(), new move(), args);
   System.exit(result);
 }
 public static void main(String[] args) throws Exception {
   int exitCode = ToolRunner.run(HBaseConfiguration.create(), new HBaseStationImporter(), args);
   System.exit(exitCode);
 }
 public static void main(String[] args) throws Exception {
   int exitCode = ToolRunner.run(new SortByTemperatureUsingTotalOrderPartitioner(), args);
   System.exit(exitCode);
 }