@Override
public int run(String[] args) throws Exception {
  JobConf conf = new JobConf(getConf(), getClass());
  conf.setJobName("UFO count");

  String[] otherArgs = new GenericOptionsParser(conf, args).getRemainingArgs();
  if (otherArgs.length != 2) {
    System.err.println("Usage: avro UFO counter <in> <out>");
    System.exit(2);
  }

  FileInputFormat.addInputPath(conf, new Path(otherArgs[0]));
  Path outputPath = new Path(otherArgs[1]);
  FileOutputFormat.setOutputPath(conf, outputPath);
  // Remove any previous output so the job can be re-run cleanly
  // (delete(Path) is deprecated; pass recursive=true explicitly).
  outputPath.getFileSystem(conf).delete(outputPath, true);

  // Input records follow the schema in ufo.avsc; map output is (string, long) pairs.
  Schema inputSchema = Schema.parse(getClass().getResourceAsStream("ufo.avsc"));
  AvroJob.setInputSchema(conf, inputSchema);
  AvroJob.setMapOutputSchema(conf,
      Pair.getPairSchema(Schema.create(Schema.Type.STRING),
                         Schema.create(Schema.Type.LONG)));
  AvroJob.setOutputSchema(conf, OUTPUT_SCHEMA);

  AvroJob.setMapperClass(conf, AvroRecordMapper.class);
  AvroJob.setReducerClass(conf, AvroRecordReducer.class);
  conf.setInputFormat(AvroInputFormat.class);

  JobClient.runJob(conf);
  return 0;
}
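// A hedged sketch of the AvroRecordMapper / AvroRecordReducer classes the
// driver above references but does not show, written against the old
// org.apache.avro.mapred API. The "shape" field name is an assumption (any
// string field from ufo.avsc would do), as is OUTPUT_SCHEMA being the same
// Pair(STRING, LONG) schema used for the map output.
import java.io.IOException;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.mapred.AvroCollector;
import org.apache.avro.mapred.AvroMapper;
import org.apache.avro.mapred.AvroReducer;
import org.apache.avro.mapred.Pair;
import org.apache.avro.util.Utf8;
import org.apache.hadoop.mapred.Reporter;

class AvroRecordMapper extends AvroMapper<GenericRecord, Pair<Utf8, Long>> {
  @Override
  public void map(GenericRecord record,
      AvroCollector<Pair<Utf8, Long>> collector, Reporter reporter)
      throws IOException {
    // Emit (shape, 1) for each sighting record; "shape" is a hypothetical field.
    collector.collect(new Pair<Utf8, Long>(
        new Utf8(record.get("shape").toString()), 1L));
  }
}

class AvroRecordReducer extends AvroReducer<Utf8, Long, Pair<Utf8, Long>> {
  @Override
  public void reduce(Utf8 key, Iterable<Long> values,
      AvroCollector<Pair<Utf8, Long>> collector, Reporter reporter)
      throws IOException {
    long sum = 0; // sum the per-record counts for this key
    for (long value : values) {
      sum += value;
    }
    collector.collect(new Pair<Utf8, Long>(key, sum));
  }
}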
public static boolean stopIteration(Configuration conf) throws IOException {
  FileSystem fs = FileSystem.get(conf);
  Path preFile = new Path("preX/Result");
  Path curFile = new Path("curX/part-00000");
  if (!(fs.exists(preFile) && fs.exists(curFile))) {
    System.exit(1);
  }

  // Compare the previous and current results element by element; iteration
  // stops once every pair of values agrees within the eps threshold
  // (a class-level constant).
  boolean stop = true;
  String line1, line2;
  BufferedReader br1 = new BufferedReader(new InputStreamReader(fs.open(preFile)));
  BufferedReader br2 = new BufferedReader(new InputStreamReader(fs.open(curFile)));
  try {
    while ((line1 = br1.readLine()) != null && (line2 = br2.readLine()) != null) {
      String[] str1 = line1.split("\\s+");
      String[] str2 = line2.split("\\s+");
      double preElem = Double.parseDouble(str1[1]);
      double curElem = Double.parseDouble(str2[1]);
      if (Math.abs(preElem - curElem) > eps) {
        stop = false;
        break;
      }
    }
  } finally {
    br1.close(); // the original never closed its readers
    br2.close();
  }

  if (!stop) {
    // Not converged yet: promote the current result to "previous" for the next pass.
    fs.delete(preFile, true);
    if (!fs.rename(curFile, preFile)) {
      System.exit(1);
    }
  }
  return stop;
}
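// A hedged sketch (not part of the original) of how stopIteration() is meant
// to be used: keep launching passes until two successive results agree within
// eps. runIteration() is a hypothetical helper that runs one MapReduce pass
// writing its result under curX.
public static void iterateUntilConverged(Configuration conf) throws Exception {
  boolean converged = false;
  while (!converged) {
    runIteration(conf);              // hypothetical: one pass, output to curX
    converged = stopIteration(conf); // compares preX/Result with curX/part-00000
  }
}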
public static void main(String[] args) throws Exception {
  int megaBytes = 10;
  int files = 100;
  boolean noRead = false;
  boolean noWrite = false;
  boolean noSeek = false;
  boolean fastCheck = false;
  long seed = new Random().nextLong();

  String usage = "Usage: TestFileSystem -files N -megaBytes M"
      + " [-noread] [-nowrite] [-noseek] [-fastcheck]";

  if (args.length == 0) {
    System.err.println(usage);
    System.exit(-1);
  }
  for (int i = 0; i < args.length; i++) { // parse command line
    if (args[i].equals("-files")) {
      files = Integer.parseInt(args[++i]);
    } else if (args[i].equals("-megaBytes")) {
      megaBytes = Integer.parseInt(args[++i]);
    } else if (args[i].equals("-noread")) {
      noRead = true;
    } else if (args[i].equals("-nowrite")) {
      noWrite = true;
    } else if (args[i].equals("-noseek")) {
      noSeek = true;
    } else if (args[i].equals("-fastcheck")) {
      fastCheck = true;
    }
  }

  LOG.info("seed = " + seed);
  LOG.info("files = " + files);
  LOG.info("megaBytes = " + megaBytes);

  // conf, MEGA, and the test helpers below are class-level members.
  FileSystem fs = FileSystem.get(conf);

  if (!noWrite) {
    createControlFile(fs, megaBytes * MEGA, files, seed);
    writeTest(fs, fastCheck);
  }
  if (!noRead) {
    readTest(fs, fastCheck);
  }
  if (!noSeek) {
    seekTest(fs, fastCheck);
  }
}
public static void main(String[] args) throws IOException {
  if (args.length != 2) {
    System.err.println("Usage: OldMaxTemperature <input path> <output path>");
    System.exit(-1);
  }

  JobConf conf = new JobConf(OldMaxTemperature.class);
  conf.setJobName("Max temperature");

  FileInputFormat.addInputPath(conf, new Path(args[0]));
  FileOutputFormat.setOutputPath(conf, new Path(args[1]));

  conf.setMapperClass(OldMaxTemperatureMapper.class);
  conf.setReducerClass(OldMaxTemperatureReducer.class);

  conf.setOutputKeyClass(Text.class);
  conf.setOutputValueClass(IntWritable.class);

  JobClient.runJob(conf);
}
public void configure(JobConf job) {
  // key == sortInput for sort-input; key == sortOutput for sort-output
  key = deduceInputFile(job);

  if (key == sortOutput) {
    partitioner = new HashPartitioner<WritableComparable, Writable>();

    // Work out the 'current' partition and the number of reduces of the 'sort'
    try {
      URI inputURI = new URI(job.get("map.input.file"));
      String inputFile = inputURI.getPath();
      // Skip past "part-" to get the numeric partition suffix.
      partition = Integer.parseInt(
          inputFile.substring(inputFile.lastIndexOf("part") + 5));
      noSortReducers = job.getInt("sortvalidate.sort.reduce.tasks", -1);
    } catch (Exception e) {
      System.err.println("Caught: " + e);
      System.exit(-1);
    }
  }
}
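// A hedged sketch (not shown in the original) of the deduceInputFile() helper
// used above, modeled on Hadoop's SortValidator: it returns the sortInput
// marker when the task's input file lives directly under the first configured
// input directory (the sort input), and sortOutput otherwise.
static final Text sortInput = new Text("sortInput");
static final Text sortOutput = new Text("sortOutput");

public static Text deduceInputFile(JobConf job) {
  Path[] inputPaths = FileInputFormat.getInputPaths(job);
  Path inputFile = new Path(job.get("map.input.file"));
  return inputFile.getParent().equals(inputPaths[0]) ? sortInput : sortOutput;
}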
public static void main(String[] args) throws Exception {
  int res = ToolRunner.run(new Configuration(), new XiangLi1_exercise3(), args);
  System.exit(res);
}
public static void main(String[] args) {
  int testType = TEST_TYPE_READ;
  int bufferSize = DEFAULT_BUFFER_SIZE;
  int fileSize = 1;
  int nrFiles = 1;
  String resFileName = DEFAULT_RES_FILE_NAME;
  boolean isSequential = false;

  String version = "DFSCIOTest.0.0.1";
  String usage = "Usage: DFSCIOTest -read | -write | -clean "
      + "[-nrFiles N] [-fileSize MB] [-resFile resultFileName] [-bufferSize Bytes]";

  System.out.println(version);
  if (args.length == 0) {
    System.err.println(usage);
    System.exit(-1);
  }
  for (int i = 0; i < args.length; i++) { // parse command line
    if (args[i].startsWith("-r")) {
      testType = TEST_TYPE_READ;
    } else if (args[i].startsWith("-w")) {
      testType = TEST_TYPE_WRITE;
    } else if (args[i].startsWith("-clean")) {
      testType = TEST_TYPE_CLEANUP;
    } else if (args[i].startsWith("-seq")) {
      isSequential = true;
    } else if (args[i].equals("-nrFiles")) {
      nrFiles = Integer.parseInt(args[++i]);
    } else if (args[i].equals("-fileSize")) {
      fileSize = Integer.parseInt(args[++i]);
    } else if (args[i].equals("-bufferSize")) {
      bufferSize = Integer.parseInt(args[++i]);
    } else if (args[i].equals("-resFile")) {
      resFileName = args[++i];
    }
  }

  LOG.info("nrFiles = " + nrFiles);
  LOG.info("fileSize (MB) = " + fileSize);
  LOG.info("bufferSize = " + bufferSize);

  try {
    // fsConfig is a class-level Configuration.
    fsConfig.setInt("test.io.file.buffer.size", bufferSize);
    FileSystem fs = FileSystem.get(fsConfig);

    if (testType != TEST_TYPE_CLEANUP) {
      fs.delete(HDFS_TEST_DIR, true);
      if (!fs.mkdirs(HDFS_TEST_DIR)) {
        throw new IOException("Mkdirs failed to create " + HDFS_TEST_DIR.toString());
      }

      // Copy the libhdfs executables over to the remote filesystem.
      String hadoopHome = System.getenv("HADOOP_PREFIX");
      fs.copyFromLocalFile(
          new Path(hadoopHome + "/libhdfs/libhdfs.so." + HDFS_LIB_VERSION), HDFS_SHLIB);
      fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/hdfs_read"), HDFS_READ);
      fs.copyFromLocalFile(new Path(hadoopHome + "/libhdfs/hdfs_write"), HDFS_WRITE);
    }

    if (isSequential) {
      long tStart = System.currentTimeMillis();
      sequentialTest(fs, testType, fileSize, nrFiles);
      long execTime = System.currentTimeMillis() - tStart;
      String resultLine = "Seq Test exec time sec: " + (float) execTime / 1000;
      LOG.info(resultLine);
      return;
    }
    if (testType == TEST_TYPE_CLEANUP) {
      cleanup(fs);
      return;
    }

    createControlFile(fs, fileSize, nrFiles);
    long tStart = System.currentTimeMillis();
    if (testType == TEST_TYPE_WRITE) {
      writeTest(fs);
    }
    if (testType == TEST_TYPE_READ) {
      readTest(fs);
    }
    long execTime = System.currentTimeMillis() - tStart;
    analyzeResult(fs, testType, execTime, resFileName);
  } catch (Exception e) {
    System.err.print(e.getLocalizedMessage());
    System.exit(-1);
  }
}
public static void main(String[] args) throws Exception {
  int exitCode = ToolRunner.run(new MaxTemperatureDriver(), args);
  System.exit(exitCode);
}
public static void main(String[] args) throws Exception {
  JobConf job = new JobConf(DistCp.class);
  DistCp distcp = new DistCp(job);
  int res = ToolRunner.run(distcp, args);
  System.exit(res);
}
public static void main(String[] args) throws Exception {
  int res = ToolRunner.run(new Configuration(), new FileTest(), args);
  System.exit(res);
}
static void printUsage() { System.out.println("kmeans [-m <maps>] [-r <reduces>] <input> <output>"); System.exit(1); }
public static void main(String[] args) throws Exception {
  int res = ToolRunner.run(new Configuration(), new UserViewMuliHostStepThreeGroup(), args);
  System.exit(res);
}
public static void main(String[] args) throws Exception {
  int exitCode = ToolRunner.run(new SortByTemperatureUsingTotalOrderPartitioner(), args);
  System.exit(exitCode);
}
public static void main(String[] args) throws Exception {
  int res = ToolRunner.run(NutchConfiguration.create(), new Injector(), args);
  System.exit(res);
}
public static void main(String[] args) throws Exception {
  int exitCode = ToolRunner.run(new PartitionByStationUsingMultipleOutputs(), args);
  System.exit(exitCode);
}
static void printUsage() {
  System.err.println("sortvalidate [-m <maps>] [-r <reduces>] [-deep] "
      + "-sortInput <sort-input-dir> -sortOutput <sort-output-dir>");
  System.exit(1);
}