@Test
public void testCompressorDecompressorWithExeedBufferLimit() throws Exception {
  // Size the compressor/decompressor internal buffers to the full input
  // (100 KiB) so the exceed-buffer-limit code path is exercised.
  final int byteSize = 100 * 1024;
  final byte[] rawData = generate(byteSize);

  // No try/catch: letting the exception propagate preserves the full stack
  // trace in the JUnit failure report (catch + fail(msg + ex) loses it).
  CompressDecompressTester.of(rawData)
      .withCompressDecompressPair(
          new ZlibCompressor(
              ZlibCompressor.CompressionLevel.BEST_COMPRESSION,
              CompressionStrategy.DEFAULT_STRATEGY,
              ZlibCompressor.CompressionHeader.DEFAULT_HEADER,
              byteSize),
          new ZlibDecompressor(
              ZlibDecompressor.CompressionHeader.DEFAULT_HEADER, byteSize))
      .withTestCases(
          ImmutableSet.of(
              CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
              CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
              CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
              CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
      .test();
}
  @Test
  public void testZlibCompressorDecompressor() throws Exception {
    // Round-trips 44 KiB of generated data through the default-configured
    // zlib compressor/decompressor pair across all four test strategies.
    final int byteSize = 44 * 1024;
    final byte[] rawData = generate(byteSize);

    // No try/catch: a propagated exception gives JUnit the full stack
    // trace, unlike the catch + fail(msg + ex) pattern which discards it.
    CompressDecompressTester.of(rawData)
        .withCompressDecompressPair(new ZlibCompressor(), new ZlibDecompressor())
        .withTestCases(
            ImmutableSet.of(
                CompressionTestStrategy.COMPRESS_DECOMPRESS_SINGLE_BLOCK,
                CompressionTestStrategy.COMPRESS_DECOMPRESS_BLOCK,
                CompressionTestStrategy.COMPRESS_DECOMPRESS_ERRORS,
                CompressionTestStrategy.COMPRESS_DECOMPRESS_WITH_EMPTY_STREAM))
        .test();
  }