  @Test
  public void testGetDataChunkFiles() throws IOException {
    Path testPath = new Path(TestUtils.createTempDir().getAbsolutePath());
    FileSystem fs = testPath.getFileSystem(new Configuration());
    fs.mkdirs(testPath);

    // Empty directory: no data chunk files yet for partition 0, replica type 0
    assertEquals(0, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length);

    fs.create(new Path(testPath, "0_0_1.data"));
    fs.create(new Path(testPath, "0_0_1data"));
    fs.create(new Path(testPath, "0_0_2.index"));
    fs.create(new Path(testPath, "0_0.data"));
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length, 1);
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0, 1).length, 1);
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length, 0);
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 1).length, 0);

    fs.create(new Path(testPath, "1_0_0.data"));
    fs.create(new Path(testPath, "1_0"));
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0).length, 1);
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 0).length, 1);

    fs.create(new Path(testPath, "1_0_1.data"));
    fs.create(new Path(testPath, "1_0_1data"));
    fs.create(new Path(testPath, "1_0_1.index"));
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0).length, 2);
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length, 1);

    fs.create(new Path(testPath, "1_0_2.data"));
    assertEquals(HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0).length, 3);
  }
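
  // A minimal illustrative sketch, NOT HadoopStoreBuilderUtils' actual implementation:
  // the assertions above only hold if getDataChunkFiles filters the directory listing
  // down to names of the form "<partition>_<replicaType>_<chunk>.data". Assumes
  // org.apache.hadoop.fs.FileStatus and org.apache.hadoop.fs.PathFilter are imported.
  private static FileStatus[] listDataChunkFilesSketch(FileSystem fs,
                                                       Path path,
                                                       final int partitionId,
                                                       final int replicaType) throws IOException {
    return fs.listStatus(path, new PathFilter() {

      @Override
      public boolean accept(Path p) {
        // "0_0_1.data" matches for (0, 0); "0_0_1data" and "0_0_2.index" do not
        return p.getName().matches(partitionId + "_" + replicaType + "_[0-9]+\\.data");
      }
    });
  }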

  @Test
  public void testGetDataFileChunkSet() throws IOException {
    Path headPath = new Path(TestUtils.createTempDir().getAbsolutePath());
    Path testPath = new Path(headPath, "0_0_100.data");
    Path junkPath = new Path(headPath, "1_1_100.data");
    FileSystem fs = testPath.getFileSystem(new Configuration());

    // 1) Just one matching file; junkPath is for partition 1, replica type 1 and
    // must be ignored. Chunk ids double as file sizes throughout this test.
    fs.create(testPath);
    fs.create(junkPath);
    writeRandomData(testPath, 100);
    DataFileChunkSet set =
        HadoopStoreBuilderUtils.getDataFileChunkSet(
            fs, HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
    assertEquals(1, set.getNumChunks());
    assertEquals(100, set.getDataFileSize(0));

    // 2) A second matching file; chunks come back ordered by chunk id (99 before 100)
    testPath = new Path(headPath, "0_0_99.data");
    fs.create(testPath);
    writeRandomData(testPath, 99);
    set =
        HadoopStoreBuilderUtils.getDataFileChunkSet(
            fs, HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
    assertEquals(2, set.getNumChunks());
    assertEquals(99, set.getDataFileSize(0));
    assertEquals(100, set.getDataFileSize(1));

    // 3) Several more files; the expected ordering below is numeric by chunk id
    // (1, 10, 99, 100, 101, 999, 1000), not lexicographic
    testPath = new Path(headPath, "0_0_1.data");
    fs.create(testPath);
    writeRandomData(testPath, 1);

    testPath = new Path(headPath, "0_0_10.data");
    fs.create(testPath);
    writeRandomData(testPath, 10);

    testPath = new Path(headPath, "0_0_999.data");
    fs.create(testPath);
    writeRandomData(testPath, 999);

    testPath = new Path(headPath, "0_0_101.data");
    fs.create(testPath);
    writeRandomData(testPath, 101);

    testPath = new Path(headPath, "0_0_1000.data");
    fs.create(testPath);
    writeRandomData(testPath, 1000);

    set =
        HadoopStoreBuilderUtils.getDataFileChunkSet(
            fs, HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
    assertEquals(7, set.getNumChunks());
    assertEquals(1, set.getDataFileSize(0));
    assertEquals(10, set.getDataFileSize(1));
    assertEquals(99, set.getDataFileSize(2));
    assertEquals(100, set.getDataFileSize(3));
    assertEquals(101, set.getDataFileSize(4));
    assertEquals(999, set.getDataFileSize(5));
    assertEquals(1000, set.getDataFileSize(6));
  }
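
  // A hedged sketch of what the writeRandomData helper used above presumably does; the
  // real helper is assumed to live elsewhere in this test class, so this copy is named
  // writeRandomDataSketch to avoid clashing with it. Assumed behavior: write `size`
  // random bytes to `path`, so each chunk file's length equals its chunk id. Assumes
  // org.apache.hadoop.fs.FSDataOutputStream and java.util.Random are imported.
  private void writeRandomDataSketch(Path path, int size) throws IOException {
    byte[] randomBytes = new byte[size];
    new Random().nextBytes(randomBytes);
    FSDataOutputStream stream = path.getFileSystem(new Configuration()).create(path);
    try {
      stream.write(randomBytes);
    } finally {
      stream.close();
    }
  }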