  /**
   * Encodes the given KVs with every available {@link DataBlockEncoding} and hands the encoded
   * and unencoded data to testAlgorithm for verification.
   */
  private void testEncodersOnDataset(
      List<KeyValue> kvList, boolean includesMemstoreTS, boolean includesTags) throws IOException {
    ByteBuffer unencodedDataBuf =
        RedundantKVGenerator.convertKvToByteBuffer(kvList, includesMemstoreTS);
    HFileContext fileContext =
        new HFileContextBuilder()
            .withIncludesMvcc(includesMemstoreTS)
            .withIncludesTags(includesTags)
            .build();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      DataBlockEncoder encoder = encoding.getEncoder();
      if (encoder == null) {
        continue;
      }
      HFileBlockEncodingContext encodingContext =
          new HFileBlockDefaultEncodingContext(
              encoding, HConstants.HFILEBLOCK_DUMMY_HEADER, fileContext);

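      // Encode the whole dataset as one block: reserve the dummy block header, stream every KV
      // through the encoder, then let the encoder finish the block.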
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      baos.write(HConstants.HFILEBLOCK_DUMMY_HEADER);
      DataOutputStream dos = new DataOutputStream(baos);
      encoder.startBlockEncoding(encodingContext, dos);
      for (KeyValue kv : kvList) {
        encoder.encode(kv, encodingContext, dos);
      }
      encoder.endBlockEncoding(encodingContext, dos, baos.getBuffer());
      byte[] encodedData = baos.toByteArray();

      testAlgorithm(encodedData, unencodedDataBuf, encoder);
    }
  }

  /**
   * Test whether the first key of an encoded block is decoded correctly.
   *
   * @throws IOException On test failure.
   */
  @Test
  public void testFirstKeyInBlockOnSample() throws IOException {
    List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      // Off heap block data support not added for PREFIX_TREE DBE yet.
      // TODO remove this once support is added. HBASE-12298
      if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) continue;
      DataBlockEncoder encoder = encoding.getEncoder();
      if (encoder == null) {
        continue;
      }
      ByteBuffer encodedBuffer =
          encodeKeyValues(
              encoding,
              sampleKv,
              getEncodingContext(Compression.Algorithm.NONE, encoding),
              this.useOffheapData);
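      // Every encoder must return the first key straight from the encoded bytes (used, for
      // example, when seeking before a key) without decoding the whole block.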
      Cell key = encoder.getFirstKeyCellInBlock(new SingleByteBuff(encodedBuffer));
      KeyValue firstKv = sampleKv.get(0);
      if (0 != CellComparator.COMPARATOR.compareKeyIgnoresMvcc(key, firstKv)) {
        int commonPrefix = CellUtil.findCommonPrefixInFlatKey(key, firstKv, false, true);
        fail(String.format("Bug in '%s' commonPrefix %d", encoder.toString(), commonPrefix));
      }
    }
  }
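
  /** Test that the seeker's next() returns the sample KeyValues in their original order. */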
  @Test
  public void testNextOnSample() throws IOException {
    List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      // Off heap block data support not added for PREFIX_TREE DBE yet.
      // TODO remove this once support is added. HBASE-12298
      if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) continue;
      DataBlockEncoder encoder = encoding.getEncoder();
      if (encoder == null) {
        continue;
      }
      ByteBuffer encodedBuffer =
          encodeKeyValues(
              encoding,
              sampleKv,
              getEncodingContext(Compression.Algorithm.NONE, encoding),
              this.useOffheapData);
      HFileContext meta =
          new HFileContextBuilder()
              .withHBaseCheckSum(false)
              .withIncludesMvcc(includesMemstoreTS)
              .withIncludesTags(includesTags)
              .withCompression(Compression.Algorithm.NONE)
              .build();
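      // The decoding-side HFileContext must mirror the flags used while encoding (mvcc, tags);
      // otherwise the seeker would parse cells at the wrong offsets.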
      DataBlockEncoder.EncodedSeeker seeker =
          encoder.createSeeker(
              CellComparator.COMPARATOR, encoder.newDataBlockDecodingContext(meta));
      seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
      int i = 0;
      do {
        KeyValue expectedKeyValue = sampleKv.get(i);
        Cell cell = seeker.getCell();
        if (CellComparator.COMPARATOR.compareKeyIgnoresMvcc(expectedKeyValue, cell) != 0) {
          int commonPrefix =
              CellUtil.findCommonPrefixInFlatKey(expectedKeyValue, cell, false, true);
          fail(
              String.format(
                  "next() produces wrong results "
                      + "encoder: %s i: %d commonPrefix: %d"
                      + "\n expected %s\n actual      %s",
                  encoder.toString(),
                  i,
                  commonPrefix,
                  Bytes.toStringBinary(
                      expectedKeyValue.getBuffer(),
                      expectedKeyValue.getKeyOffset(),
                      expectedKeyValue.getKeyLength()),
                  CellUtil.toString(cell, false)));
        }
        i++;
      } while (seeker.next());
    }
  }

  /** Test that all encoders agree when seeking over an encoded data block. */
  @Test
  public void testSeekingOnSample() throws IOException {
    List<KeyValue> sampleKv = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);

    // create all seekers
    List<DataBlockEncoder.EncodedSeeker> encodedSeekers =
        new ArrayList<DataBlockEncoder.EncodedSeeker>();
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      LOG.info("Encoding: " + encoding);
      // Off heap block data support not added for PREFIX_TREE DBE yet.
      // TODO remove this once support is added. HBASE-12298
      if (this.useOffheapData && encoding == DataBlockEncoding.PREFIX_TREE) continue;
      DataBlockEncoder encoder = encoding.getEncoder();
      if (encoder == null) {
        continue;
      }
      LOG.info("Encoder: " + encoder);
      ByteBuffer encodedBuffer =
          encodeKeyValues(
              encoding,
              sampleKv,
              getEncodingContext(Compression.Algorithm.NONE, encoding),
              this.useOffheapData);
      HFileContext meta =
          new HFileContextBuilder()
              .withHBaseCheckSum(false)
              .withIncludesMvcc(includesMemstoreTS)
              .withIncludesTags(includesTags)
              .withCompression(Compression.Algorithm.NONE)
              .build();
      DataBlockEncoder.EncodedSeeker seeker =
          encoder.createSeeker(
              CellComparator.COMPARATOR, encoder.newDataBlockDecodingContext(meta));
      seeker.setCurrentBuffer(new SingleByteBuff(encodedBuffer));
      encodedSeekers.add(seeker);
    }
    LOG.info("Testing it!");
    // test it!
    // try a few random seeks
    for (boolean seekBefore : new boolean[] {false, true}) {
      for (int i = 0; i < NUM_RANDOM_SEEKS; ++i) {
        int keyValueId;
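        // A seek-before needs a predecessor to land on, so never pick the very first KV for it.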
        if (!seekBefore) {
          keyValueId = randomizer.nextInt(sampleKv.size());
        } else {
          keyValueId = randomizer.nextInt(sampleKv.size() - 1) + 1;
        }

        KeyValue keyValue = sampleKv.get(keyValueId);
        checkSeekingConsistency(encodedSeekers, seekBefore, keyValue);
      }
    }

    // check edge cases
    LOG.info("Checking edge cases");
    checkSeekingConsistency(encodedSeekers, false, sampleKv.get(0));
    for (boolean seekBefore : new boolean[] {false, true}) {
      checkSeekingConsistency(encodedSeekers, seekBefore, sampleKv.get(sampleKv.size() - 1));
      KeyValue midKv = sampleKv.get(sampleKv.size() / 2);
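      // createLastOnRowCol yields a fake key sorting after every real cell of that row and
      // column, so this also exercises seeking to a key that is not present in the data.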
      Cell lastMidKv = CellUtil.createLastOnRowCol(midKv);
      checkSeekingConsistency(encodedSeekers, seekBefore, lastMidKv);
    }
    LOG.info("Done");
  }

  /**
   * Test whether encoding -> decoding gives consistent results on a pseudorandom sample.
   *
   * @throws IOException On test failure.
   */
  @Test
  public void testExecutionOnSample() throws IOException {
    List<KeyValue> kvList = generator.generateTestKeyValues(NUMBER_OF_KV, includesTags);
    testEncodersOnDataset(kvList, includesMemstoreTS, includesTags);
  }
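
  /**
   * Write encrypted HFiles with every data block encoding / compression combination and verify
   * that the data can be scanned and sought back correctly.
   */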
  @Test(timeout = 6000000)
  public void testHFileEncryption() throws Exception {
    // Create 1000 random test KVs
    RedundantKVGenerator generator = new RedundantKVGenerator();
    List<KeyValue> testKvs = generator.generateTestKeyValues(1000);

    // Iterate through data block encoding and compression combinations
    Configuration conf = TEST_UTIL.getConfiguration();
    CacheConfig cacheConf = new CacheConfig(conf);
    for (DataBlockEncoding encoding : DataBlockEncoding.values()) {
      for (Compression.Algorithm compression : TestHFileBlock.COMPRESSION_ALGORITHMS) {
        HFileContext fileContext =
            new HFileContextBuilder()
                .withBlockSize(4096) // small blocks
                .withEncryptionContext(cryptoContext)
                .withCompression(compression)
                .withDataBlockEncoding(encoding)
                .build();
        // write a new test HFile
        LOG.info("Writing with " + fileContext);
        Path path = new Path(TEST_UTIL.getDataTestDir(), UUID.randomUUID().toString() + ".hfile");
        FSDataOutputStream out = fs.create(path);
        HFile.Writer writer =
            HFile.getWriterFactory(conf, cacheConf)
                .withOutputStream(out)
                .withFileContext(fileContext)
                .create();
        try {
          for (KeyValue kv : testKvs) {
            writer.append(kv);
          }
        } finally {
          writer.close();
          out.close();
        }

        // read it back in
        LOG.info("Reading with " + fileContext);
        int i = 0;
        HFileScanner scanner = null;
        HFile.Reader reader = HFile.createReader(fs, path, cacheConf, conf);
        try {
          reader.loadFileInfo();
          FixedFileTrailer trailer = reader.getTrailer();
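          // The trailer carries the (wrapped) data encryption key only for encrypted files, so a
          // non-null key confirms encryption was actually applied on write.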
          assertNotNull(trailer.getEncryptionKey());
          scanner = reader.getScanner(false, false);
          assertTrue("Initial seekTo failed", scanner.seekTo());
          do {
            Cell kv = scanner.getCell();
            assertTrue(
                "Read back an unexpected or invalid KV",
                testKvs.contains(KeyValueUtil.ensureKeyValue(kv)));
            i++;
          } while (scanner.next());
        } finally {
          if (scanner != null) {
            scanner.close();
          }
          reader.close();
        }

        assertEquals("Did not read back as many KVs as written", i, testKvs.size());

        // Test random seeks with pread
        LOG.info("Random seeking with " + fileContext);
        reader = HFile.createReader(fs, path, cacheConf, conf);
        try {
          scanner = reader.getScanner(false, true);
          assertTrue("Initial seekTo failed", scanner.seekTo());
          for (i = 0; i < 100; i++) {
            KeyValue kv = testKvs.get(RNG.nextInt(testKvs.size()));
            assertEquals("Unable to find KV as expected: " + kv, scanner.seekTo(kv), 0);
          }
        } finally {
          scanner.close();
          reader.close();
        }
      }
    }
  }