@Override
public void cacheBlock(BlockCacheKey blockName, Cacheable toBeCached) {
  ByteBuffer storedBlock;

  // Reserve a buffer from the backing slab; this may block until a block becomes available.
  try {
    storedBlock = backingStore.alloc(toBeCached.getSerializedLength());
  } catch (InterruptedException e) {
    LOG.warn("SlabAllocator was interrupted while waiting for block to become available", e);
    return;
  }

  CacheablePair newEntry = new CacheablePair(toBeCached.getDeserializer(), storedBlock);
  toBeCached.serialize(storedBlock);

  synchronized (this) {
    CacheablePair alreadyCached = backingMap.putIfAbsent(blockName, newEntry);

    if (alreadyCached != null) {
      // Another entry for this key won the race; release the buffer we just allocated.
      backingStore.free(storedBlock);
      throw new RuntimeException("already cached " + blockName);
    }
    if (actionWatcher != null) {
      actionWatcher.onInsertion(blockName, this);
    }
  }
  newEntry.recentlyAccessed.set(System.nanoTime());
  this.size.addAndGet(newEntry.heapSize());
}
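// Illustrative sketch (not part of the original source): exercises the cacheBlock() contract
// above, where a second insert of an already-cached key frees the freshly allocated buffer and
// throws a RuntimeException. Assumes JUnit 4 and HBase's BlockCacheKey(String, long) constructor;
// makeTestCacheable() is a hypothetical helper standing in for whatever Cacheable implementation
// the test has at hand.
@Test(expected = RuntimeException.class)
public void exampleDoubleCacheBlockSketch() {
  // 16 KB blocks, 10 blocks, no action watcher (cacheBlock() and doEviction() null-check it).
  SingleSizeCache cache = new SingleSizeCache(16 * 1024, 10, null);
  BlockCacheKey key = new BlockCacheKey("example-hfile", 0);
  Cacheable block = makeTestCacheable(16 * 1024); // hypothetical helper
  cache.cacheBlock(key, block);
  cache.cacheBlock(key, block); // second insert of the same key should throw "already cached"
}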
@Test
public void testReturns() {
  Slab slab = new Slab(0, SLAB_SIZE, PAGE_SIZE);
  ByteBuffer[] pages = new ByteBuffer[PAGES];

  // Take all pages
  for (int p = 0; p < PAGES; p++) {
    pages[p] = slab.takePageBuffer();
  }

  // Put them back
  for (int p = 0; p < PAGES; p++) {
    slab.returnPageBuffer(pages[p]);
  }

  // Take them again
  int[] arrayOffsets = new int[PAGES];
  for (int p = 0; p < PAGES; p++) {
    ByteBuffer buffer = slab.takePageBuffer();
    assertNotNull(buffer);
    assertEquals(0, buffer.position());
    assertEquals(PAGE_SIZE, buffer.limit());
    arrayOffsets[p] = buffer.arrayOffset();
  }
  assertNull(slab.takePageBuffer());

  Arrays.sort(arrayOffsets);
  int expectedArrayOffset = 0;
  for (int b = 0; b < PAGES; b++) {
    assertEquals(expectedArrayOffset, arrayOffsets[b]);
    expectedArrayOffset += PAGE_SIZE;
  }
}
/**
 * Constructor. Specify the size of the blocks, the number of blocks, and the SlabCache this
 * cache will be assigned to.
 *
 * @param blockSize the size of each block, in bytes
 * @param numBlocks the number of blocks of blockSize this cache will hold
 * @param master the SlabCache this SingleSizeCache is assigned to
 */
public SingleSizeCache(int blockSize, int numBlocks, SlabItemActionWatcher master) {
  this.blockSize = blockSize;
  this.numBlocks = numBlocks;
  backingStore = new Slab(blockSize, numBlocks);
  this.stats = new CacheStats();
  this.actionWatcher = master;
  this.size = new AtomicLong(CACHE_FIXED_OVERHEAD + backingStore.heapSize());
  this.timeSinceLastAccess = new AtomicLong();

  // This removal listener is called whenever the cache automatically evicts something.
  RemovalListener<BlockCacheKey, CacheablePair> listener =
      new RemovalListener<BlockCacheKey, CacheablePair>() {
        @Override
        public void onRemoval(RemovalNotification<BlockCacheKey, CacheablePair> notification) {
          if (!notification.wasEvicted()) {
            // Only process removals by eviction, not by replacement or explicit removal.
            return;
          }
          CacheablePair value = notification.getValue();
          timeSinceLastAccess.set(System.nanoTime() - value.recentlyAccessed.get());
          stats.evict();
          doEviction(notification.getKey(), value);
        }
      };

  backingMap = CacheBuilder.newBuilder()
      .maximumSize(numBlocks - 1)
      .removalListener(listener)
      .<BlockCacheKey, CacheablePair>build()
      .asMap();
}
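// Illustrative sketch (not part of the original source): shows the size accounting exposed by the
// accessors further below right after construction, assuming the backing Slab starts with all of
// its blocks available. Block size and block count are arbitrary example values; the action
// watcher is left null.
@Test
public void exampleFreshCacheSizingSketch() {
  SingleSizeCache cache = new SingleSizeCache(16 * 1024, 64, null);
  assertEquals(0, cache.getOccupiedSize());           // no blocks taken from the Slab yet
  assertEquals(64L * 16 * 1024, cache.getFreeSize()); // all 64 blocks still available
  cache.shutdown();
}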
private void doEviction(BlockCacheKey key, CacheablePair evictedBlock) {
  long evictedHeap = 0;
  synchronized (evictedBlock) {
    if (evictedBlock.serializedData == null) {
      // someone else already freed
      return;
    }
    evictedHeap = evictedBlock.heapSize();
    ByteBuffer bb = evictedBlock.serializedData;
    evictedBlock.serializedData = null;
    backingStore.free(bb);

    // We have to do this callback inside the synchronization here.
    // Otherwise we can have the following interleaving:
    // Thread A calls getBlock():
    //   SlabCache directs call to this SingleSizeCache
    //   It gets the CacheablePair object
    // Thread B runs eviction
    //   doEviction() is called and sets serializedData = null, here.
    // Thread A sees the null serializedData, and returns null
    // Thread A calls cacheBlock on the same block, and gets
    //   "already cached" since the block is still in backingStore
    if (actionWatcher != null) {
      actionWatcher.onEviction(key, this);
    }
  }
  stats.evicted();
  size.addAndGet(-1 * evictedHeap);
}
@Test
public void testInitialAllocations() {
  Slab slab = new Slab(0, SLAB_SIZE, PAGE_SIZE);
  int[] arrayOffsets = new int[PAGES];

  for (int p = 0; p < PAGES; p++) {
    ByteBuffer buffer = slab.takePageBuffer();
    assertNotNull(buffer);
    assertEquals(0, buffer.position());
    assertEquals(PAGE_SIZE, buffer.limit());
    arrayOffsets[p] = buffer.arrayOffset();
  }
  assertNull(slab.takePageBuffer());

  Arrays.sort(arrayOffsets);
  int expectedArrayOffset = 0;
  for (int b = 0; b < PAGES; b++) {
    assertEquals(expectedArrayOffset, arrayOffsets[b]);
    expectedArrayOffset += PAGE_SIZE;
  }
}
@Override
public long getBlockCount() {
  return numBlocks - backingStore.getBlocksRemaining();
}

public long getOccupiedSize() {
  return (long) (numBlocks - backingStore.getBlocksRemaining()) * (long) blockSize;
}

public long getFreeSize() {
  return (long) backingStore.getBlocksRemaining() * (long) blockSize;
}

public long heapSize() {
  return this.size.get() + backingStore.heapSize();
}

public void shutdown() {
  backingStore.shutdown();
}