private void runTestOnTable(HTable table)
    throws IOException, InterruptedException, ClassNotFoundException {
  Job job = null;
  try {
    LOG.info("Before map/reduce startup");
    job = new Job(table.getConfiguration(), "process column contents");
    job.setNumReduceTasks(1);

    // Scan only the input family; the mapper receives one Result per row.
    Scan scan = new Scan();
    scan.addFamily(INPUT_FAMILY);
    TableMapReduceUtil.initTableMapperJob(
        Bytes.toString(table.getTableName()), scan,
        ProcessContentsMapper.class, ImmutableBytesWritable.class, Put.class, job);
    TableMapReduceUtil.initTableReducerJob(
        Bytes.toString(table.getTableName()), IdentityTableReducer.class, job);
    FileOutputFormat.setOutputPath(job, new Path("test"));

    LOG.info("Started " + Bytes.toString(table.getTableName()));
    assertTrue(job.waitForCompletion(true));
    LOG.info("After map/reduce completion");

    // verify map-reduce results
    verify(Bytes.toString(table.getTableName()));
  } finally {
    table.close();
    if (job != null) {
      // Clean up the job's temporary working directory.
      FileUtil.fullyDelete(new File(job.getConfiguration().get("hadoop.tmp.dir")));
    }
  }
}
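ProcessContentsMapper is referenced above but not shown. A minimal sketch of what such a mapper could look like, assuming it re-emits each scanned cell as a Put keyed by the row; OUTPUT_FAMILY and the "data" qualifier are assumptions, and the real mapper's transform is not shown in the source:

// Hypothetical sketch of ProcessContentsMapper; OUTPUT_FAMILY and the
// qualifier are assumed, not taken from the source.
public static class ProcessContentsMapper
    extends TableMapper<ImmutableBytesWritable, Put> {
  @Override
  public void map(ImmutableBytesWritable key, Result value, Context context)
      throws IOException, InterruptedException {
    // Read the scanned cell and re-emit it under the output family,
    // keyed by the same row so the identity reducer writes it back.
    byte[] cell = value.getValue(INPUT_FAMILY, Bytes.toBytes("data"));
    Put put = new Put(key.get());
    put.add(OUTPUT_FAMILY, Bytes.toBytes("data"), cell);
    context.write(key, put);
  }
}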
public H2MetaTable(Database database) throws Exception {
  // Make sure the backing HBase table exists before opening a handle to it.
  createTableIfNotExists();
  this.database = database;
  table = new HTable(HBaseUtils.getConfiguration(), TABLE_NAME);
  // Watch ZooKeeper so metadata changes made by other nodes are picked up.
  watcher = new ZooKeeperWatcher(table.getConfiguration(), "H2MetaTableWatcher", this);
  tracker = new H2MetaTableTracker(watcher, this);
  tracker.start();
}
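createTableIfNotExists() is called above but not shown. A minimal sketch under the same old-style HBaseAdmin API; the single column family and its name "info" are assumptions:

// Hypothetical sketch of createTableIfNotExists(); the "info" family is assumed.
private static void createTableIfNotExists() throws Exception {
  HBaseAdmin admin = new HBaseAdmin(HBaseUtils.getConfiguration());
  try {
    if (!admin.tableExists(TABLE_NAME)) {
      HTableDescriptor desc = new HTableDescriptor(TABLE_NAME);
      desc.addFamily(new HColumnDescriptor("info"));
      admin.createTable(desc);
    }
  } finally {
    // Release the admin's connection resources.
    admin.close();
  }
}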
public static void loadTable(HTable table, String path) throws Exception {
  // Bulk-load pre-built HFiles from 'path' into the given table.
  LoadIncrementalHFiles loader = new LoadIncrementalHFiles(table.getConfiguration());
  loadTable(table, loader, path);
}
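The method above delegates to a three-argument loadTable overload that is not shown. A plausible sketch, assuming it simply points the loader at the directory of HFiles:

// Hypothetical sketch of the three-argument overload called above.
public static void loadTable(HTable table, LoadIncrementalHFiles loader, String path)
    throws Exception {
  // doBulkLoad moves each HFile under 'path' into the region owning its key range.
  loader.doBulkLoad(new Path(path), table);
}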
public static Map<String, HRegionContent> getRegionMap(HTable table) throws Exception {
  Configuration conf = table.getConfiguration();
  HBaseAdmin admin = new HBaseAdmin(conf);
  try {
    return getRegionMap(table, admin);
  } finally {
    // Release the admin's connection once the region map has been built.
    admin.close();
  }
}
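A short usage sketch for getRegionMap; the configuration, the table name, and printing the region names are assumptions (HRegionContent is the project's own type, so only the map keys are used here):

// Hypothetical usage: list the region names of a table.
Configuration conf = HBaseConfiguration.create();
HTable table = new HTable(conf, "my_table"); // table name is an assumption
try {
  Map<String, HRegionContent> regions = getRegionMap(table);
  for (String regionName : regions.keySet()) {
    System.out.println(regionName);
  }
} finally {
  table.close();
}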