/**
 * Compresses whatever is currently buffered into {@code compressedBuffer}, builds the block
 * header (token, compressed length, original length, checksum of the original bytes) and
 * writes the whole block to the underlying stream. When compression does not shrink the data,
 * the original bytes are stored verbatim with the RAW method token instead. The buffer offset
 * {@code o} is reset to zero afterwards; a call with an empty buffer is a no-op.
 *
 * @throws IOException if writing to the underlying stream fails
 */
private void flushBufferedData() throws IOException {
  if (o == 0) {
    return; // nothing buffered, nothing to emit
  }

  // Checksum is computed over the UNCOMPRESSED bytes and stored in the header
  // so the decompressor can verify integrity after inflating.
  checksum.reset();
  checksum.update(buffer, 0, o);
  final int check = (int) checksum.getValue();

  int compressedLen = compressor.compress(buffer, 0, o, compressedBuffer, HEADER_LENGTH);
  final int method;
  if (compressedLen < o) {
    method = COMPRESSION_METHOD_LZ4;
  } else {
    // Compression did not help: fall back to storing the block uncompressed.
    method = COMPRESSION_METHOD_RAW;
    compressedLen = o;
    System.arraycopy(buffer, 0, compressedBuffer, HEADER_LENGTH, o);
  }

  // Header layout after the magic bytes:
  //   [token][compressed length LE][original length LE][checksum LE]
  compressedBuffer[MAGIC_LENGTH] = (byte) (method | compressionLevel);
  writeIntLE(compressedLen, compressedBuffer, MAGIC_LENGTH + 1);
  writeIntLE(o, compressedBuffer, MAGIC_LENGTH + 5);
  writeIntLE(check, compressedBuffer, MAGIC_LENGTH + 9);
  assert MAGIC_LENGTH + 13 == HEADER_LENGTH;

  out.write(compressedBuffer, 0, HEADER_LENGTH + compressedLen);
  o = 0;
}
/**
 * Create a new {@link OutputStream} with configurable block size. Large blocks require more
 * memory at compression and decompression time but should improve the compression ratio.
 *
 * @param out the {@link OutputStream} to feed
 * @param blockSize the maximum number of bytes to try to compress at once, must be >= 64 and <=
 *     32 M
 * @param compressor the {@link LZ4Compressor} instance to use to compress data
 * @param checksum the {@link Checksum} instance to use to check data for integrity.
 * @param syncFlush true if pending data should also be flushed on {@link #flush()}
 * @throws NullPointerException if {@code out}, {@code compressor} or {@code checksum} is null
 */
public LZ4BlockOutputStream(
    OutputStream out, int blockSize, LZ4Compressor compressor, Checksum checksum,
    boolean syncFlush) {
  super(out);
  // Fail fast on null collaborators instead of throwing an NPE on the first write.
  if (out == null) {
    throw new NullPointerException("out");
  }
  if (compressor == null) {
    throw new NullPointerException("compressor");
  }
  if (checksum == null) {
    throw new NullPointerException("checksum");
  }
  this.blockSize = blockSize;
  this.compressor = compressor;
  this.checksum = checksum;
  // compressionLevel(blockSize) also validates the documented blockSize bounds
  // (presumably throwing IllegalArgumentException — defined elsewhere in this file).
  this.compressionLevel = compressionLevel(blockSize);
  this.buffer = new byte[blockSize];
  // Worst case: the block does not compress, plus room for the block header.
  this.compressedBuffer = new byte[HEADER_LENGTH + compressor.maxCompressedLength(blockSize)];
  this.syncFlush = syncFlush;
  o = 0; // number of buffered, not-yet-flushed bytes
  finished = false;
  // The magic bytes at the start of compressedBuffer never change; write them once.
  System.arraycopy(MAGIC, 0, compressedBuffer, 0, MAGIC_LENGTH);
}