private static void run(BenchmarkArgs parsedArgs)
    throws TTransportException, IOException, InterruptedException {
  // Describe which Hive table (and optional partition filter) to read.
  HiveInputDescription input = new HiveInputDescription();
  input.setDbName(parsedArgs.getDatabase());
  input.setTableName(parsedArgs.getTable());
  input.setPartitionFilter(parsedArgs.getPartitionFilter());

  // Connect to the Hive metastore via Thrift.
  HiveConf hiveConf = new HiveConf(InputBenchmark.class);
  ThriftHiveMetastore.Iface client =
      HiveMetastores.create(parsedArgs.getHiveHost(), parsedArgs.getHivePort());

  System.err.println("Initialize profile with input data");
  HiveApiInputFormat.setProfileInputDesc(hiveConf, input,
      HiveApiInputFormat.DEFAULT_PROFILE_ID);

  HiveApiInputFormat defaultInputFormat = new HiveApiInputFormat();
  if (parsedArgs.isTrackMetrics()) {
    defaultInputFormat.setObserver(
        new MetricsObserver("default", parsedArgs.getRecordPrintPeriod()));
  }

  List<InputSplit> splits = defaultInputFormat.getSplits(hiveConf, client);
  System.err.println("getSplits returned " + splits.size() + " splits");

  // Read every split sequentially, counting the total number of rows.
  long numRows = 0;
  for (int i = 0; i < splits.size(); ++i) {
    InputSplit split = splits.get(i);
    TaskAttemptID taskID = new TaskAttemptID();
    TaskAttemptContext taskContext = new TaskAttemptContext(hiveConf, taskID);
    if (i % parsedArgs.getSplitPrintPeriod() == 0) {
      System.err.println("Handling split " + i + " of " + splits.size());
    }
    RecordReader<WritableComparable, HiveReadableRecord> reader =
        defaultInputFormat.createRecordReader(split, taskContext);
    reader.initialize(split, taskContext);
    numRows += readFully(reader);
  }

  System.err.println("Parsed " + numRows + " rows");
}
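
// The readFully(reader) helper called above is not shown in this snippet.
// A minimal sketch follows, assuming it simply drains the reader and returns
// the row count. It uses only the standard org.apache.hadoop.mapreduce
// RecordReader API (nextKeyValue/getCurrentValue/close); the real helper may
// additionally parse or validate each record.
private static long readFully(RecordReader<WritableComparable, HiveReadableRecord> reader)
    throws IOException, InterruptedException {
  long numRows = 0;
  while (reader.nextKeyValue()) {
    // Touch the record so the read is not skipped; a benchmark might
    // parse or checksum it here instead.
    reader.getCurrentValue();
    ++numRows;
  }
  reader.close();
  return numRows;
}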