/** * This procedure runs on the unisolated index. The raw data is written directly onto the {@link * Journal} and the index is added/updated using the given file, version and block and the address * of the block's data on the {@link Journal}. * * @return A {@link Boolean} whose value is <code>true</code> iff the block was overwritten. */ @Override public Object apply(final IIndex ndx) { // tunnel through to the backing journal. final AbstractJournal journal = (AbstractJournal) ((AbstractBTree) ndx).getStore(); // obtain the thread-local key builder for that journal. final IKeyBuilder keyBuilder = ndx.getIndexMetadata().getKeyBuilder(); /* * Write the block on the journal, obtaining the address at which it * was written - use 0L as the address for an empty block. */ final long addr = len == 0 ? 0L : journal.write(ByteBuffer.wrap(b, off, len)); // form the key for the index entry for this block. final byte[] key = keyBuilder .reset() .appendText(id, true /* unicode */, false /* successor */) .append(version) .append(block) .getKey(); // record the address of the block in the index. final boolean overwrite; { final DataOutputBuffer out = new DataOutputBuffer(Bytes.SIZEOF_LONG); // encode the value for the entry. out.reset().putLong(addr); final byte[] val = out.toByteArray(); // insert the entry into the index. overwrite = ndx.insert(key, val) != null; } log.info( "Wrote " + len + " bytes : id=" + id + ", version=" + version + ", block#=" + block + " @ addr" + journal.toString(addr) + ", overwrite=" + overwrite); return Boolean.valueOf(overwrite); }
/**
 * Exercises encode/decode round trips for the given compressor using randomly generated
 * multi-word strings. Each trial verifies that the decoded string equals its source and that the
 * encoder and decoder report the same byte count; the overall compression ratio is logged at the
 * end.
 */
private void doTest(final IUnicodeCompressor c) {

  final long startMillis = System.currentTimeMillis();

  final Random rnd = new Random();

  final int ntrials = 100000;

  // Source text for each trial (reused across trials).
  final StringBuilder src = new StringBuilder();

  // Target buffer for the encoded representation (reused across trials).
  final DataOutputBuffer encodeBuf = new DataOutputBuffer();

  // Target buffer for the decoded characters (reused across trials).
  final StringBuilder decodeBuf = new StringBuilder();

  long nwords = 0L; // running count of encoded words.
  long nchars = 0L; // running count of encoded characters.
  long nbytes = 0L; // running count of bytes produced by encoding.

  for (int trial = 0; trial < ntrials; trial++) {

    // between 1 and 20 words per trial.
    final int wordLength = rnd.nextInt(20) + 1;

    // compose a space-separated string of randomly chosen words.
    src.setLength(0);
    for (int i = 0; i < wordLength; i++) {
      if (i > 0) src.append(" ");
      src.append(words[rnd.nextInt(words.length)]);
    }
    final String expected = src.toString();

    /*
     * Encode.
     */
    encodeBuf.reset();
    final int nencoded = c.encode(expected, encodeBuf);
    final byte[] encoded = encodeBuf.toByteArray();

    nwords += wordLength;
    nchars += expected.length();
    nbytes += encoded.length;

    /*
     * Note: The caller needs to know the exact length to be decoded in
     * advance with this api. This implies that we will have to buffer
     * the data before it can be copied into an IKeyBuilder. This is not
     * a problem as long as it is the only thing in the buffer, but that
     * is not true for many use cases, including serialization of a
     * BigdataValue.
     */
    decodeBuf.setLength(0);
    final int ndecoded =
        c.decode(
            new ByteArrayInputStream(encoded, 0 /* off */, encoded.length /* len */), decodeBuf);
    final String actual = decodeBuf.toString();

    // the round trip must reproduce the original string.
    assertEquals(expected, actual);

    // and the encoder/decoder must agree on the byte count.
    assertEquals(nencoded, ndecoded);
  }

  final long elapsed = System.currentTimeMillis() - startMillis;

  // The compression ratio.
  final double ratio = (double) nbytes / (double) nchars;

  if (log.isInfoEnabled())
    log.info(
        "nwords="
            + nwords
            + ", nchars="
            + nchars
            + ", nbytes="
            + nbytes
            + ", bytes/char="
            + ratio
            + ", elapsed="
            + elapsed);
}