  @Test
  public void testGetDataChunkFiles() throws IOException {
    Path testPath = new Path(TestUtils.createTempDir().getAbsolutePath());
    FileSystem fs = testPath.getFileSystem(new Configuration());
    fs.mkdirs(testPath);

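    // An empty directory yields no data chunk files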
    assertEquals(0, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length);

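    // Of the four files below, only "0_0_1.data" is a valid data chunk file for
    // partition 0, replica type 0; the rest have the wrong extension or name format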
    fs.create(new Path(testPath, "0_0_1.data"));
    fs.create(new Path(testPath, "0_0_1data"));
    fs.create(new Path(testPath, "0_0_2.index"));
    fs.create(new Path(testPath, "0_0.data"));
    assertEquals(1, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0).length);
    assertEquals(1, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 0, 1).length);
    assertEquals(0, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length);
    assertEquals(0, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 0, 1).length);

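    // "1_0_0.data" is the first chunk for partition 1, replica type 0; "1_0" is ignored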
    fs.create(new Path(testPath, "1_0_0.data"));
    fs.create(new Path(testPath, "1_0"));
    assertEquals(1, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0).length);
    assertEquals(1, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 0).length);

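    // "1_0_1.data" adds a second chunk for partition 1; the other two files are not data chunks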
    fs.create(new Path(testPath, "1_0_1.data"));
    fs.create(new Path(testPath, "1_0_1data"));
    fs.create(new Path(testPath, "1_0_1.index"));
    assertEquals(2, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0).length);
    assertEquals(1, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0, 1).length);

    fs.create(new Path(testPath, "1_0_2.data"));
    assertEquals(3, HadoopStoreBuilderUtils.getDataChunkFiles(fs, testPath, 1, 0).length);
  }

  @Test
  public void testReadFileContents() throws Exception {

    Path testPath = new Path(TestUtils.createTempDir().getAbsolutePath(), "tempFile");
    FileSystem fs = testPath.getFileSystem(new Configuration());
    fs.create(testPath);

    // 1) Read back empty file
    String emptyString = HadoopStoreBuilderUtils.readFileContents(fs, testPath, 1024);
    Assert.assertEquals(0, emptyString.length());

    // 2) Write random bytes, then read them back
    byte[] randomBytes = writeRandomData(testPath, 10);
    Assert.assertEquals(
        new String(randomBytes), HadoopStoreBuilderUtils.readFileContents(fs, testPath, 1024));

    // 3) Write a json string
    fs.delete(testPath, true);
    fs.create(testPath);

    ReadOnlyStorageMetadata metadata = new ReadOnlyStorageMetadata();
    metadata.add(ReadOnlyStorageMetadata.FORMAT, ReadOnlyStorageFormat.READONLY_V2.getCode());

    // Write the metadata as JSON, closing the stream so the contents are flushed to disk
    FileOutputStream outputStream = new FileOutputStream(testPath.toString());
    outputStream.write(metadata.toJsonString().getBytes());
    outputStream.close();

    ReadOnlyStorageMetadata readMetadata =
        new ReadOnlyStorageMetadata(HadoopStoreBuilderUtils.readFileContents(fs, testPath, 1024));
    Assert.assertEquals(
        ReadOnlyStorageFormat.READONLY_V2.getCode(),
        readMetadata.get(ReadOnlyStorageMetadata.FORMAT));
  }

  @Test
  public void testGetChunkId() {
    assertEquals(0, HadoopStoreBuilderUtils.getChunkId("0_0.d"));
    assertEquals(1, HadoopStoreBuilderUtils.getChunkId("0_1.d"));
    assertEquals(100, HadoopStoreBuilderUtils.getChunkId("0_100.d"));
    assertEquals(674, HadoopStoreBuilderUtils.getChunkId("10_83_674.d"));
    assertEquals(30, HadoopStoreBuilderUtils.getChunkId("0_10_30.d"));
  }

  @Test
  public void testIsFormatCorrect() {
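    // Silence log output from isFormatCorrect() when it rejects malformed file names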
    Logger.getRootLogger().removeAllAppenders();

    assertFalse(HadoopStoreBuilderUtils.isFormatCorrect("0", ReadOnlyStorageFormat.READONLY_V0));
    assertFalse(HadoopStoreBuilderUtils.isFormatCorrect("0_", ReadOnlyStorageFormat.READONLY_V0));
    assertFalse(HadoopStoreBuilderUtils.isFormatCorrect("0_0", ReadOnlyStorageFormat.READONLY_V0));
    assertFalse(HadoopStoreBuilderUtils.isFormatCorrect("0_0.", ReadOnlyStorageFormat.READONLY_V0));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("0_0index", ReadOnlyStorageFormat.READONLY_V0));
    assertTrue(
        HadoopStoreBuilderUtils.isFormatCorrect("0_0.index", ReadOnlyStorageFormat.READONLY_V0));
    assertTrue(
        HadoopStoreBuilderUtils.isFormatCorrect("10_0.data", ReadOnlyStorageFormat.READONLY_V0));
    assertTrue(
        HadoopStoreBuilderUtils.isFormatCorrect("10_10.data", ReadOnlyStorageFormat.READONLY_V0));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("10_10.dat", ReadOnlyStorageFormat.READONLY_V0));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("10_10.inde", ReadOnlyStorageFormat.READONLY_V0));

    assertFalse(HadoopStoreBuilderUtils.isFormatCorrect("0", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(HadoopStoreBuilderUtils.isFormatCorrect("0_", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(HadoopStoreBuilderUtils.isFormatCorrect("0_0", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(HadoopStoreBuilderUtils.isFormatCorrect("0_0.", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("0_0.index", ReadOnlyStorageFormat.READONLY_V2));
    assertTrue(
        HadoopStoreBuilderUtils.isFormatCorrect("0_0_0.index", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("0_0_0index", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("10_0.d", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("10_10.d", ReadOnlyStorageFormat.READONLY_V2));
    assertTrue(
        HadoopStoreBuilderUtils.isFormatCorrect(
            "10_10_0.index", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("10_10_0.inde", ReadOnlyStorageFormat.READONLY_V2));
    assertFalse(
        HadoopStoreBuilderUtils.isFormatCorrect("10_10_0.dat", ReadOnlyStorageFormat.READONLY_V2));
  }

  @Test
  public void testGetDataFileChunkSet() throws IOException {

    Path headPath = new Path(TestUtils.createTempDir().getAbsolutePath());
    Path testPath = new Path(headPath, "0_0_100.data");
    Path junkPath = new Path(headPath, "1_1_100.data");
    FileSystem fs = testPath.getFileSystem(new Configuration());

    // 1) Just one correct file; the junk file belongs to a different partition and is ignored
    fs.create(testPath);
    fs.create(junkPath);
    writeRandomData(testPath, 100);
    DataFileChunkSet set =
        HadoopStoreBuilderUtils.getDataFileChunkSet(
            fs, HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
    assertEquals(1, set.getNumChunks());
    assertEquals(100, set.getDataFileSize(0));

    // 2) Another correct file
    testPath = new Path(headPath, "0_0_99.data");
    fs.create(testPath);
    writeRandomData(testPath, 99);
    set =
        HadoopStoreBuilderUtils.getDataFileChunkSet(
            fs, HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
    assertEquals(2, set.getNumChunks());
    assertEquals(99, set.getDataFileSize(0));
    assertEquals(100, set.getDataFileSize(1));

    // 3) Add some more files
    testPath = new Path(headPath, "0_0_1.data");
    fs.create(testPath);
    writeRandomData(testPath, 1);

    testPath = new Path(headPath, "0_0_10.data");
    fs.create(testPath);
    writeRandomData(testPath, 10);

    testPath = new Path(headPath, "0_0_999.data");
    fs.create(testPath);
    writeRandomData(testPath, 999);

    testPath = new Path(headPath, "0_0_101.data");
    fs.create(testPath);
    writeRandomData(testPath, 101);

    testPath = new Path(headPath, "0_0_1000.data");
    fs.create(testPath);
    writeRandomData(testPath, 1000);

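    // Chunks come back sorted numerically by chunk id; each file's size equals its
    // chunk id by construction above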
    set =
        HadoopStoreBuilderUtils.getDataFileChunkSet(
            fs, HadoopStoreBuilderUtils.getDataChunkFiles(fs, headPath, 0, 0));
    assertEquals(7, set.getNumChunks());
    assertEquals(1, set.getDataFileSize(0));
    assertEquals(10, set.getDataFileSize(1));
    assertEquals(99, set.getDataFileSize(2));
    assertEquals(100, set.getDataFileSize(3));
    assertEquals(101, set.getDataFileSize(4));
    assertEquals(999, set.getDataFileSize(5));
    assertEquals(1000, set.getDataFileSize(6));
  }
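
  // The tests above rely on a writeRandomData(Path, int) helper defined elsewhere in this
  // class. A minimal sketch of what such a helper could look like (the body below is an
  // assumption, not the actual implementation): overwrite the file at the given local path
  // with `size` random printable bytes and return them, so callers can compare the bytes
  // against what they read back. Assumes java.util.Random is imported.
  private byte[] writeRandomData(Path path, int size) throws IOException {
    byte[] bytes = new byte[size];
    Random random = new Random();
    for(int i = 0; i < size; i++) {
      // Stick to printable ASCII so new String(bytes) round-trips under any default charset
      bytes[i] = (byte) ('a' + random.nextInt(26));
    }
    // FileOutputStream without the append flag truncates any existing contents
    FileOutputStream stream = new FileOutputStream(path.toString());
    stream.write(bytes);
    stream.close();
    return bytes;
  }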