/**
 * Bulk-loads an HFile into a table whose column family name starts with an
 * underscore, and verifies all rows arrive. Family dirs beginning with '_'
 * could be mistaken for internal dirs (e.g. "_logs"), so this guards that path.
 */
@Test
public void testTableWithCFNameStartWithUnderScore() throws Exception {
  Path dir = util.getDataTestDirOnTestFS("cfNameStartWithUnderScore");
  FileSystem fs = util.getTestFileSystem();
  dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
  String family = "_cf";
  Path familyDir = new Path(dir, family);

  byte[] from = Bytes.toBytes("begin");
  byte[] to = Bytes.toBytes("end");
  Configuration conf = util.getConfiguration();
  String tableName = "mytable_cfNameStartWithUnderScore";
  // try-with-resources: the original only closed the table when loader.run
  // threw, leaking it if HFile creation or loader construction failed.
  try (Table table = util.createTable(TableName.valueOf(tableName), family)) {
    HFileTestUtil.createHFile(
        conf, fs, new Path(familyDir, "hfile"), Bytes.toBytes(family), QUALIFIER, from, to, 1000);
    LoadIncrementalHFiles loader = new LoadIncrementalHFiles(conf);
    String[] args = {dir.toString(), tableName};
    loader.run(args);
    // Every row written into the HFile must be visible after the bulk load.
    assertEquals(1000, util.countRows(table));
  }
}
/**
 * Writes one more HFile than hbase.mapreduce.bulkload.max.hfiles.perRegion.perFamily
 * allows and verifies the bulk load is rejected with a descriptive IOException.
 */
@Test
public void testLoadTooMayHFiles() throws Exception {
  Path dir = util.getDataTestDirOnTestFS("testLoadTooMayHFiles");
  FileSystem fs = util.getTestFileSystem();
  // Use the non-deprecated makeQualified(URI, Path) overload, consistent with
  // testTableWithCFNameStartWithUnderScore.
  dir = dir.makeQualified(fs.getUri(), fs.getWorkingDirectory());
  Path familyDir = new Path(dir, Bytes.toString(FAMILY));

  byte[] from = Bytes.toBytes("begin");
  byte[] to = Bytes.toBytes("end");
  // <= produces MAX_FILES_PER_REGION_PER_FAMILY + 1 files: one over the limit.
  for (int i = 0; i <= MAX_FILES_PER_REGION_PER_FAMILY; i++) {
    HFileTestUtil.createHFile(
        util.getConfiguration(),
        fs,
        new Path(familyDir, "hfile_" + i),
        FAMILY,
        QUALIFIER,
        from,
        to,
        1000);
  }

  LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration());
  String[] args = {dir.toString(), "mytable_testLoadTooMayHFiles"};
  try {
    loader.run(args);
    fail("Bulk loading too many files should fail");
  } catch (IOException ie) {
    assertTrue(
        ie.getMessage()
            .contains("Trying to load more than " + MAX_FILES_PER_REGION_PER_FAMILY + " hfiles"));
  }
}
/**
 * With automatic table creation disabled, bulk-loading into a missing table
 * must fail with TableNotFoundException rather than creating the table.
 */
@Test(expected = TableNotFoundException.class)
public void testWithoutAnExistingTableAndCreateTableSetToNo() throws Exception {
  Configuration conf = util.getConfiguration();
  conf.set(LoadIncrementalHFiles.CREATE_TABLE_CONF_KEY, "no");
  // The source directory content is irrelevant: the table lookup fails first.
  new LoadIncrementalHFiles(conf).run(new String[] {"directory", "nonExistingTable"});
}
private void runTest( String testName, HTableDescriptor htd, BloomType bloomType, boolean preCreateTable, byte[][] tableSplitKeys, byte[][][] hfileRanges) throws Exception { Path dir = util.getDataTestDirOnTestFS(testName); FileSystem fs = util.getTestFileSystem(); dir = dir.makeQualified(fs); Path familyDir = new Path(dir, Bytes.toString(FAMILY)); int hfileIdx = 0; for (byte[][] range : hfileRanges) { byte[] from = range[0]; byte[] to = range[1]; HFileTestUtil.createHFile( util.getConfiguration(), fs, new Path(familyDir, "hfile_" + hfileIdx++), FAMILY, QUALIFIER, from, to, 1000); } int expectedRows = hfileIdx * 1000; if (preCreateTable) { util.getHBaseAdmin().createTable(htd, tableSplitKeys); } final TableName tableName = htd.getTableName(); LoadIncrementalHFiles loader = new LoadIncrementalHFiles(util.getConfiguration()); String[] args = {dir.toString(), tableName.toString()}; loader.run(args); Table table = new HTable(util.getConfiguration(), tableName); try { assertEquals(expectedRows, util.countRows(table)); } finally { table.close(); } // verify staging folder has been cleaned up Path stagingBasePath = SecureBulkLoadUtil.getBaseStagingDir(util.getConfiguration()); if (fs.exists(stagingBasePath)) { FileStatus[] files = fs.listStatus(stagingBasePath); for (FileStatus file : files) { assertTrue( "Folder=" + file.getPath() + " is not cleaned up.", file.getPath().getName() != "DONOTERASE"); } } util.deleteTable(tableName); }
/**
 * Splits a single HFile spanning rows "aaa".."zzz" at key "ggg" and verifies
 * that the bottom and top halves together still hold all 1000 rows.
 */
@Test
public void testSplitStoreFile() throws IOException {
  Path dir = util.getDataTestDirOnTestFS("testSplitHFile");
  FileSystem fs = util.getTestFileSystem();
  Path inputHFile = new Path(dir, "testhfile");
  HColumnDescriptor cfDescriptor = new HColumnDescriptor(FAMILY);
  HFileTestUtil.createHFile(
      util.getConfiguration(),
      fs,
      inputHFile,
      FAMILY,
      QUALIFIER,
      Bytes.toBytes("aaa"),
      Bytes.toBytes("zzz"),
      1000);

  Path bottomOut = new Path(dir, "bottom.out");
  Path topOut = new Path(dir, "top.out");
  LoadIncrementalHFiles.splitStoreFile(
      util.getConfiguration(), inputHFile, cfDescriptor, Bytes.toBytes("ggg"), bottomOut, topOut);

  // No row may be lost or duplicated by the split.
  int totalRows = verifyHFile(bottomOut) + verifyHFile(topOut);
  assertEquals(1000, totalRows);
}
/**
 * Exercises LoadIncrementalHFiles.inferBoundaries on overlapping key ranges.
 */
@Test
public void testInferBoundaries() {
  TreeMap<byte[], Integer> map = new TreeMap<byte[], Integer>(Bytes.BYTES_COMPARATOR);

  /* Toy example
   *     c---------i            o------p          s---------t  v------x
   * a------e    g-----k   m-------------q   r----s            u----w
   *
   * Should be inferred as:
   * a-----------------k   m-------------q   r--------------t  u---------x
   *
   * The output should be (m,r,u)
   */
  // Data-driven instead of nine copy-pasted first/last assignments; use
  // Bytes.toBytes (UTF-8) rather than String.getBytes(), which depends on the
  // platform default charset.
  String[][] ranges = {
    {"a", "e"}, {"r", "s"}, {"o", "p"}, {"g", "k"}, {"v", "x"},
    {"c", "i"}, {"m", "q"}, {"s", "t"}, {"u", "w"}
  };
  for (String[] range : ranges) {
    addStartEndKeysForTest(map, Bytes.toBytes(range[0]), Bytes.toBytes(range[1]));
  }

  byte[][] keysArray = LoadIncrementalHFiles.inferBoundaries(map);

  byte[][] expected = {Bytes.toBytes("m"), Bytes.toBytes("r"), Bytes.toBytes("u")};
  // JUnit convention: expected value first.
  assertEquals(expected.length, keysArray.length);
  for (int row = 0; row < keysArray.length; row++) {
    assertArrayEquals(expected[row], keysArray[row]);
  }
}