/** * Performs the compaction. * * @param scanner Where to read from. * @param writer Where to write to. * @param smallestReadPoint Smallest read point. * @param cleanSeqId When true, remove seqId(used to be mvcc) value which is <= smallestReadPoint * @return Whether compaction ended; false if it was interrupted for some reason. */ protected boolean performCompaction( InternalScanner scanner, CellSink writer, long smallestReadPoint, boolean cleanSeqId) throws IOException { int bytesWritten = 0; // Since scanner.next() can return 'false' but still be delivering data, // we have to use a do/while loop. List<Cell> kvs = new ArrayList<Cell>(); int closeCheckInterval = HStore.getCloseCheckInterval(); long lastMillis; if (LOG.isDebugEnabled()) { lastMillis = System.currentTimeMillis(); } else { lastMillis = 0; } boolean hasMore; do { hasMore = scanner.next(kvs, compactionKVMax); // output to writer: for (Cell c : kvs) { KeyValue kv = KeyValueUtil.ensureKeyValue(c); if (cleanSeqId && kv.getSequenceId() <= smallestReadPoint) { kv.setSequenceId(0); } writer.append(kv); ++progress.currentCompactedKVs; progress.totalCompactedSize += kv.getLength(); // check periodically to see if a system stop is requested if (closeCheckInterval > 0) { bytesWritten += kv.getLength(); if (bytesWritten > closeCheckInterval) { // Log the progress of long running compactions every minute if // logging at DEBUG level if (LOG.isDebugEnabled()) { long now = System.currentTimeMillis(); if ((now - lastMillis) >= 60 * 1000) { LOG.debug( "Compaction progress: " + progress + String.format( ", rate=%.2f kB/sec", (bytesWritten / 1024.0) / ((now - lastMillis) / 1000.0))); lastMillis = now; } } bytesWritten = 0; if (!store.areWritesEnabled()) { progress.cancel(); return false; } } } } kvs.clear(); } while (hasMore); progress.complete(); return true; }
/**
 * Do the encoding, but do not cache the encoded data.
 *
 * @return encoded data block with header and checksum
 */
public byte[] encodeData() {
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  try {
    // Reserve space for the block header; real header/checksum are presumably
    // filled in later by the block writer — TODO(review) confirm against caller.
    baos.write(HConstants.HFILEBLOCK_DUMMY_HEADER);
    DataOutputStream out = new DataOutputStream(baos);
    this.dataBlockEncoder.startBlockEncoding(encodingCtx, out);
    ByteBuffer in = getUncompressedBuffer();
    in.rewind();
    int klength, vlength;
    int tagsLength = 0;
    long memstoreTS = 0L;
    KeyValue kv = null;
    // Walk the flat KeyValue layout sequentially: for each cell we read
    // [int keyLen][int valueLen][key+value bytes][optional 2-byte tagsLen +
    // tags][optional vlong mvcc/seqId]. Read order matters — each read
    // advances the buffer position to the next field.
    while (in.hasRemaining()) {
      int kvOffset = in.position();
      klength = in.getInt();
      vlength = in.getInt();
      ByteBufferUtils.skip(in, klength + vlength);
      if (this.meta.isIncludesTags()) {
        // 2-byte big-endian tags length; '^' is equivalent to '|' here since
        // the operands occupy disjoint bit ranges.
        tagsLength = ((in.get() & 0xff) << 8) ^ (in.get() & 0xff);
        ByteBufferUtils.skip(in, tagsLength);
      }
      if (this.meta.isIncludesMvcc()) {
        memstoreTS = ByteBufferUtils.readVLong(in);
      }
      // Wrap the cell in place over the backing array — assumes the buffer is
      // array-backed (in.array() throws otherwise); TODO(review) confirm
      // getUncompressedBuffer() guarantees this.
      kv = new KeyValue(
          in.array(), kvOffset,
          (int) KeyValue.getKeyValueDataStructureSize(klength, vlength, tagsLength));
      kv.setSequenceId(memstoreTS);
      this.dataBlockEncoder.encode(kv, encodingCtx, out);
    }
    // Hand the raw accumulated bytes to endBlockEncoding: writeTo() into the
    // grabbing stream exposes the underlying buffer as stream.buf without an
    // extra defensive copy.
    BufferGrabbingByteArrayOutputStream stream = new BufferGrabbingByteArrayOutputStream();
    baos.writeTo(stream);
    this.dataBlockEncoder.endBlockEncoding(encodingCtx, out, stream.buf);
  } catch (IOException e) {
    // Everything here writes to in-memory streams, so an IOException indicates
    // a bug in the encoder rather than a recoverable I/O failure.
    throw new RuntimeException(
        String.format(
            "Bug in encoding part of algorithm %s. "
                + "Probably it requested more bytes than are available.",
            toString()),
        e);
  }
  return baos.toByteArray();
}