/**
 * Gets a set of input splits for a MapReduce job running over a Kiji table. One split is created
 * per region in the input Kiji table.
 *
 * @param configuration of the job using the splits. The configuration should specify the input
 *     Kiji table being used, through the configuration variable
 *     {@link KijiConfKeys#KIJI_INPUT_TABLE_URI}.
 * @param numSplits desired for the job. This framework hint is ignored by this method.
 * @return an array of input splits to be operated on in the MapReduce job.
 * @throws IOException if an I/O error occurs while communicating with HBase to determine the
 *     regions in the Kiji table.
 */
@Override
public InputSplit[] getSplits(JobConf configuration, int numSplits) throws IOException {
  final String uriString =
      Preconditions.checkNotNull(configuration.get(KijiConfKeys.KIJI_INPUT_TABLE_URI));
  final KijiURI inputTableURI = KijiURI.newBuilder(uriString).build();
  final Kiji kiji = Kiji.Factory.open(inputTableURI, configuration);
  try {
    final KijiTable table = kiji.openTable(inputTableURI.getTable());
    try {
      final HTableInterface htable = HBaseKijiTable.downcast(table).getHTable();
      final List<InputSplit> splits = Lists.newArrayList();
      for (KijiRegion region : table.getRegions()) {
        final byte[] startKey = region.getStartKey();
        // TODO(KIJIMR-65): For now, pick the first available location (i.e. region server), if any.
        final String location =
            region.getLocations().isEmpty() ? null : region.getLocations().iterator().next();
        final TableSplit tableSplit =
            new TableSplit(htable.getTableName(), startKey, region.getEndKey(), location);
        splits.add(new KijiTableSplit(tableSplit));
      }
      return splits.toArray(new InputSplit[0]);
    } finally {
      table.release();
    }
  } finally {
    kiji.release();
  }
}
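// Usage sketch (illustrative only, not part of the original source): how a classic-API
// MapReduce job could be wired up to drive getSplits() above. The enclosing class name
// KijiTableInputFormat and the table URI "kiji://.env/default/users" are assumptions made
// for illustration.
private static InputSplit[] exampleGetSplitsUsage() throws IOException {
  final JobConf job = new JobConf(HBaseConfiguration.create());
  // Point the job at the input Kiji table (hypothetical URI):
  job.set(KijiConfKeys.KIJI_INPUT_TABLE_URI, "kiji://.env/default/users");
  job.setInputFormat(KijiTableInputFormat.class);  // assumed enclosing class name
  // One split (and thus one map task) per region; the numSplits hint is ignored:
  return new KijiTableInputFormat().getSplits(job, 0);
}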
/**
 * Generates the split start keys for a given table.
 *
 * @param tableURI URI of the Kiji table to split.
 * @param nsplits Number of splits.
 * @param conf Base Hadoop configuration used to open the Kiji instance.
 * @return a list of split start keys, as HFileKeyValue (with no value, just the keys).
 * @throws IOException on I/O error.
 */
private static List<HFileKeyValue> makeTableKeySplit(
    KijiURI tableURI, int nsplits, Configuration conf) throws IOException {
  final Kiji kiji = Kiji.Factory.open(tableURI, conf);
  try {
    final KijiTable table = kiji.openTable(tableURI.getTable());
    try {
      if (NUM_SPLITS_AUTO == nsplits) {
        // Auto-splitting: one split per existing region, keyed by the region start keys.
        final List<HFileKeyValue> startKeys = Lists.newArrayList();
        for (KijiRegion region : table.getRegions()) {
          startKeys.add(HFileKeyValue.createFromRowKey(region.getStartKey()));
        }
        return startKeys;
      } else {
        switch (KijiTableLayout.getEncoding(table.getLayout().getDesc().getKeysFormat())) {
          case RAW: {
            // The user has explicitly specified how many HFiles to create, but this is not
            // possible when row key hashing is disabled.
            throw new JobConfigurationException(String.format(
                "Table '%s' has row key hashing disabled, so the number of HFile splits must be "
                + "determined by the number of HRegions in the HTable. "
                + "Use an HFileMapReduceJobOutput constructor that enables auto splitting.",
                table.getName()));
          }
          case FORMATTED:
          case HASH:
          case HASH_PREFIX: {
            // These cases are supported:
            break;
          }
          default:
            throw new RuntimeException("Unhandled row key encoding: "
                + KijiTableLayout.getEncoding(table.getLayout().getDesc().getKeysFormat()));
        }
        return generateEvenStartKeys(nsplits);
      }
    } finally {
      ResourceUtils.releaseOrLog(table);
    }
  } finally {
    ResourceUtils.releaseOrLog(kiji);
  }
}
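// Sketch only: the generateEvenStartKeys() helper called above is not shown in this source.
// The method below is one plausible shape for it, assuming hashed row keys are uniformly
// distributed so the key space can be cut at evenly spaced two-byte-prefix boundaries; it is
// named generateEvenStartKeysSketch to make clear it is not the actual implementation, which
// may differ. Assumes org.apache.hadoop.hbase.HConstants is imported.
private static List<HFileKeyValue> generateEvenStartKeysSketch(int nsplits) {
  final List<HFileKeyValue> startKeys = Lists.newArrayListWithCapacity(nsplits);
  // The first split starts at the empty row key, i.e. the beginning of the table:
  startKeys.add(HFileKeyValue.createFromRowKey(HConstants.EMPTY_START_ROW));
  for (int i = 1; i < nsplits; i++) {
    // Evenly spaced cut points over the 16-bit prefix space of the hashed keys:
    final int boundary = (int) (((long) i << 16) / nsplits);
    startKeys.add(HFileKeyValue.createFromRowKey(
        new byte[] {(byte) (boundary >>> 8), (byte) boundary}));
  }
  return startKeys;
}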