/**
 * Serializes the in-memory string pool and character chunks of this document into a
 * brand-new chunk file for {@code coll}/{@code docName}, records how many entries of
 * each kind were written into {@code docProps}, then flushes and closes this object.
 *
 * NOTE(review): this method ends by calling {@code close()}, so the instance (or one
 * reference to it, given the ref-counting in close()) is consumed by a flush.
 *
 * @param coll     target collection used to resolve the chunk file location
 * @param docName  document name used to resolve the chunk file location
 * @param docProps property map that receives KEY_STRPOOL_WRITTEN / KEY_CHUNK_WRITTEN
 * @throws IOException if writing or closing the backing segments fails
 */
public void flush(final DbCollection coll, final String docName, final PropertyMap docProps) throws IOException {
    final File chunkFile = getChunkFile(coll, docName);
    // The chunk file must not already exist: each flush creates a fresh file.
    assert (!chunkFile.exists()) : "file already exists: " + chunkFile.getAbsolutePath();
    final int splen = _strPool.size();
    // Hash-descriptor backed variable-length segment store over the new file.
    final Segments paged = new VarSegments(chunkFile, DescriptorType.hash);
    for (int i = 0; i < splen; i++) {
        // big string
        final byte[] b = _strPool.get(i);
        // Address derived from the pool index — presumably a dedicated key space
        // for large strings, distinct from chunkKey addresses. TODO confirm.
        final int addr = stringKey(i);
        paged.write(addr, b);
    }
    _strPool.clear();
    // Number of completely-filled char chunks: current write pointer divided by the
    // block size (via shift). Asserted to fit in an int before narrowing.
    final long lcclen = _cpointer >> BLOCK_SHIFT;
    assert (lcclen <= Integer.MAX_VALUE) : lcclen;
    // Clamp to the last valid _cchunks index; the loop below uses '<=', so the
    // partially-filled trailing chunk is written as well.
    final int cclen = Math.min((int) lcclen, _cchunks.length - 1);
    for (int i = 0; i <= cclen; i++) {
        final char[] c = _cchunks[i];
        // Each chunk is compressed before being written out.
        final byte[] b = compress(compressor, c);
        // Chunk addresses are spaced by the (long) default block size.
        final long addr = chunkKey(i * DEFAULT_BLOCK_SIZE_L);
        paged.write(addr, b);
        // Release the chunk so it can be garbage-collected once written.
        _cchunks[i] = null;
    }
    // Persist the written counts so a reader knows how much to load back.
    docProps.setProperty(KEY_STRPOOL_WRITTEN, String.valueOf(splen));
    docProps.setProperty(KEY_CHUNK_WRITTEN, String.valueOf(cclen));
    // flush(false): presumably a non-forced/async flush — verify Segments contract.
    paged.flush(false);
    close();
    LOG.info("write string chunk file:" + chunkFile.getAbsolutePath());
}
/**
 * Releases one reference to this chunk; when the last reference is released the
 * underlying resources are torn down: the superclass is closed, the cached field
 * is reflectively nulled out (making it GC-eligible), and the paged store is
 * closed and dropped.
 *
 * @throws IOException if closing the superclass or the paged store fails
 */
@Override
public void close() throws IOException {
    final int remaining = _refcount.decrementAndGet();
    if (remaining == 0) {
        super.close();
        // Reflection hack: clear the private "_cache" field declared on
        // PagedStringChunk so the cached data does not linger after close.
        PrivilegedAccessor.unsafeSetField(this, PagedStringChunk.class, "_cache", null);
        _paged.close();
        this._paged = null;
    }
}