long blockSize = calculateBlockSize(maxSize, 10); LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR, cache.cacheBlock(i.cacheKey, i); cache.cacheBlock(i.cacheKey, i); assertEquals(15,cache.getBlockCount()); for (CachedItem i:small) { assertNotNull(cache.getBlock(i.cacheKey, true, false, false)); assertNull(cache.getBlock(i.cacheKey, true, false, false)); assertEquals(10, cache.getStats().getFailedInserts());
long currentAcceptableSize = acceptableSize(); long hardLimitSize = (long) (hardCapacityLimitFactor * currentAcceptableSize); if (currentSize >= hardLimitSize) { runEviction(); long newSize = updateSizeMetrics(cb, false); map.put(cacheKey, cb); long val = elements.incrementAndGet(); assertCounterSanity(size, val); runEviction();
@Override public void run() { for (int blockIndex = 0; blockIndex < blocksPerThread || (!cache.isEvictionInProgress()); ++blockIndex) { CachedItem block = new CachedItem(hfileName, (int) blockSize, blockCount.getAndIncrement()); boolean inMemory = Math.random() > 0.5; cache.cacheBlock(block.cacheKey, block, inMemory); } cache.evictBlocksByHfileName(hfileName); } });
/** Returns the number of bytes still available before the cache hits its configured maximum. */
@Override
public long getFreeSize() {
  long max = getMaxSize();
  long used = getCurrentSize();
  return max - used;
}
/**
 * Updates the maximum cache size. If the cache already holds more than the new
 * acceptable size, an eviction pass is started (unless one is in progress).
 *
 * @param maxSize new maximum cache size, in bytes
 */
@Override
public void setMaxSize(long maxSize) {
  this.maxSize = maxSize;
  boolean overAcceptable = this.size.get() > acceptableSize();
  if (overAcceptable && !evictionInProgress) {
    runEviction();
  }
}
@Override public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat, boolean updateCacheMetrics) { // TODO: is there a hole here, or just awkwardness since in the lruCache getBlock // we end up calling l2Cache.getBlock. // We are not in a position to exactly look at LRU cache or BC as BlockType may not be getting // passed always. return onHeapCache.containsBlock(cacheKey)? onHeapCache.getBlock(cacheKey, caching, repeat, updateCacheMetrics): l2Cache.getBlock(cacheKey, caching, repeat, updateCacheMetrics); }
/** Human-readable summary of the cache's counts, sizes, and tuning factors. */
@Override
public String toString() {
  MoreObjects.ToStringHelper helper = MoreObjects.toStringHelper(this);
  helper.add("blockCount", getBlockCount());
  helper.add("currentSize", StringUtils.byteDesc(getCurrentSize()));
  helper.add("freeSize", StringUtils.byteDesc(getFreeSize()));
  helper.add("maxSize", StringUtils.byteDesc(getMaxSize()));
  helper.add("heapSize", StringUtils.byteDesc(heapSize()));
  helper.add("minSize", StringUtils.byteDesc(minSize()));
  helper.add("minFactor", minFactor);
  helper.add("multiSize", StringUtils.byteDesc(multiSize()));
  helper.add("multiFactor", multiFactor);
  helper.add("singleSize", StringUtils.byteDesc(singleSize()));
  helper.add("singleFactor", singleFactor);
  return helper.toString();
}
long blockSize = calculateBlockSize(maxSize, 31); LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR, cache.cacheBlock(singleBlocks[i].cacheKey, singleBlocks[i]); cache.cacheBlock(multiBlocks[i].cacheKey, multiBlocks[i]); cache.getBlock(multiBlocks[i].cacheKey, true, false, true); cache.cacheBlock(memoryBlocks[i].cacheKey, memoryBlocks[i], true); assertEquals(0, cache.getStats().getEvictionCount()); cache.setMaxSize((long)(maxSize * 0.5f)); assertEquals(1, cache.getStats().getEvictionCount()); assertEquals(15, cache.getStats().getEvictedCount()); assertEquals(null, cache.getBlock(singleBlocks[i].cacheKey, true, false, true)); assertEquals(null, cache.getBlock(multiBlocks[i].cacheKey, true, false, true)); assertEquals(null, cache.getBlock(memoryBlocks[i].cacheKey, true, false, true)); assertEquals(singleBlocks[i], cache.getBlock(singleBlocks[i].cacheKey, true, false, true)); assertEquals(multiBlocks[i], cache.getBlock(multiBlocks[i].cacheKey, true, false, true)); assertEquals(memoryBlocks[i], cache.getBlock(memoryBlocks[i].cacheKey, true, false, true));
lazyCompressDisabled.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, false); CacheConfig cc = new CacheConfig(lazyCompressDisabled, new LruBlockCache(maxSize, HConstants.DEFAULT_BLOCKSIZE, false, lazyCompressDisabled)); assertFalse(cc.shouldCacheDataCompressed()); assertTrue(cc.getBlockCache().get() instanceof LruBlockCache); LruBlockCache disabledBlockCache = (LruBlockCache) cc.getBlockCache().get(); LOG.info("disabledBlockCache=" + disabledBlockCache); assertEquals("test inconsistency detected.", maxSize, disabledBlockCache.getMaxSize()); assertTrue("eviction thread spawned unintentionally.", disabledBlockCache.getEvictionThread() == null); assertEquals("freshly created blockcache contains blocks.", 0, disabledBlockCache.getBlockCount()); long disabledBlockCount = disabledBlockCache.getBlockCount(); assertTrue("blockcache should contain blocks. disabledBlockCount=" + disabledBlockCount, disabledBlockCount > 0); long disabledEvictedCount = disabledBlockCache.getStats().getEvictedCount(); for (Map.Entry<BlockCacheKey, LruCachedBlock> e : disabledBlockCache.getMapForTests().entrySet()) { HFileBlock block = (HFileBlock) e.getValue().getBuffer(); assertTrue("found a packed block, block=" + block, block.isUnpacked()); lazyCompressEnabled.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true); cc = new CacheConfig(lazyCompressEnabled, new LruBlockCache(maxSize, HConstants.DEFAULT_BLOCKSIZE, false, lazyCompressEnabled)); assertTrue("test improperly configured.", cc.shouldCacheDataCompressed()); assertTrue(cc.getBlockCache().get() instanceof LruBlockCache); LruBlockCache enabledBlockCache = (LruBlockCache) cc.getBlockCache().get(); LOG.info("enabledBlockCache=" + enabledBlockCache);
assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize); LruBlockCache cache = new LruBlockCache(maxSize,blockSize); EvictionThread evictionThread = cache.getEvictionThread(); assertTrue(evictionThread != null); cache.cacheBlock(block.cacheKey, block); while(cache.getStats().getEvictionCount() == 0) { Thread.sleep(200); assertTrue("Eviction never happened.", n++ < 20); curCnt = cache.getBlockCount(); prevCnt != curCnt; prevCnt = curCnt, curCnt = cache.getBlockCount()) { Thread.sleep(200); assertTrue("Cache never stabilized.", n++ < 20); long evictionCount = cache.getStats().getEvictionCount(); assertTrue(evictionCount >= 1); System.out.println("Background Evictions run: " + evictionCount);
assertEquals(lruExpectedSize, lbc.getMaxSize()); BlockCache bc = cbc.l2Cache; long initialL1BlockCount = lbc.getBlockCount(); long initialL2BlockCount = bc.getBlockCount(); Cacheable c = new DataCacheEntry(); BlockCacheKey bck = new BlockCacheKey("bck", 0); lbc.cacheBlock(bck, c, false); assertEquals(initialL1BlockCount + 1, lbc.getBlockCount()); assertEquals(initialL2BlockCount, bc.getBlockCount()); final long justTooBigSize = lbc.acceptableSize() + 1; lbc.cacheBlock(new BlockCacheKey("bck2", 0), new DataCacheEntry() { @Override public long heapSize() { while (initialL1BlockCount != lbc.getBlockCount()) Threads.sleep(10); assertEquals(initialL1BlockCount, lbc.getBlockCount());
final LruBlockCache cache = new LruBlockCache(maxSize, blockSize); EvictionThread evictionThread = cache.getEvictionThread(); assertTrue(evictionThread != null); while (!evictionThread.isEnteringRun()) { assertEquals(0, cache.getBlockCount()); assertEquals(cache.getOverhead(), cache.getCurrentSize());
/**
 * Caches an on-disk-encoded block in an LruBlockCache, reads it back, and
 * verifies the returned block type/content matches the configured encoding.
 */
private void testEncodingWithCacheInternals(boolean useTag) throws IOException {
  List<KeyValue> testKvs = generator.generateTestKeyValues(60, useTag);
  HFileBlock sourceBlock = getSampleHFileBlock(testKvs, useTag);
  HFileBlock diskBlock = createBlockOnDisk(testKvs, sourceBlock, useTag);

  LruBlockCache cache = new LruBlockCache(8 * 1024 * 1024, 32 * 1024);
  BlockCacheKey key = new BlockCacheKey("test", 0);
  cache.cacheBlock(key, diskBlock);

  HeapSize fetched = cache.getBlock(key, false, false, true);
  assertTrue(fetched instanceof HFileBlock);
  HFileBlock cachedBlock = (HFileBlock) fetched;

  if (blockEncoder.getDataBlockEncoding() == DataBlockEncoding.NONE) {
    // No encoding configured: the cached bytes must equal the original block's.
    assertEquals(sourceBlock.getBufferReadOnly(), cachedBlock.getBufferReadOnly());
  } else {
    if (BlockType.ENCODED_DATA != cachedBlock.getBlockType()) {
      System.out.println(blockEncoder);
    }
    assertEquals(BlockType.ENCODED_DATA, cachedBlock.getBlockType());
  }
}
/**
 * Builds the on-heap L1 LruBlockCache from configuration.
 *
 * @param c cluster configuration
 * @return the new cache, or {@code null} when the configured on-heap cache
 *         size is negative (on-heap caching disabled)
 */
private static LruBlockCache createOnHeapCache(final Configuration c) {
  final long cacheSize = MemorySizeUtil.getOnHeapCacheSize(c);
  if (cacheSize < 0) {
    return null;
  }
  final int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);
  LOG.info("Allocating onheap LruBlockCache size=" + StringUtils.byteDesc(cacheSize)
      + ", blockSize=" + StringUtils.byteDesc(blockSize));
  return new LruBlockCache(cacheSize, blockSize, true, c);
}
HFileBlock.FILL_HEADER, -1, -1, -1, meta); LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false, (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR, cache.clearCache(); assertNull(cache.getBlock(key, false, false, false)); CacheTestUtils.getBlockAndAssertEquals(cache, key, blockWithoutNextBlockMetadata, actualBuffer, block2Buffer);
/**
 * Evict the block, and it will be cached by the victim handler if exists &amp;&amp;
 * block may be read again later
 * @param block the block to evict
 * @param evictedByEvictionProcess true if the given block is evicted by
 *          EvictionThread
 * @return the heap size of evicted block, or 0 if the block was no longer in
 *         the cache (already evicted by another caller)
 */
protected long evictBlock(LruCachedBlock block, boolean evictedByEvictionProcess) {
  // Check the result of the removal: if another thread (or an earlier call)
  // already evicted this key, bail out so we don't double-decrement the
  // element/size counters or hand the block to the victim cache twice.
  LruCachedBlock previous = map.remove(block.getCacheKey());
  if (previous == null) {
    return 0;
  }
  updateSizeMetrics(block, true);
  long val = elements.decrementAndGet();
  if (LOG.isTraceEnabled()) {
    long size = map.size();
    assertCounterSanity(size, val);
  }
  stats.evicted(block.getCachedTime(), block.getCacheKey().isPrimary());
  if (evictedByEvictionProcess && victimHandler != null) {
    if (victimHandler instanceof BucketCache) {
      // Only block the caller on the victim-cache insert while we still have
      // headroom; once past acceptableSize, enqueue without waiting.
      boolean wait = getCurrentSize() < acceptableSize();
      boolean inMemory = block.getPriority() == BlockPriority.MEMORY;
      ((BucketCache) victimHandler).cacheBlockWithWait(block.getCacheKey(), block.getBuffer(),
          inMemory, wait);
    } else {
      victimHandler.cacheBlock(block.getCacheKey(), block.getBuffer());
    }
  }
  return block.heapSize();
}
/**
 * Cache the block with the specified name and buffer.
 * <p>
 * It is assumed this will NEVER be called on an already cached block. If
 * that is done, an exception will be thrown.
 * @param cacheKey block's cache key
 * @param buf block buffer
 * @param inMemory if block is in-memory
 * @throws RuntimeException if a block is already cached under {@code cacheKey}
 */
public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf, boolean inMemory) {
  CachedBlock cb = new CachedBlock(cacheKey, buf, count.incrementAndGet(), inMemory);
  // putIfAbsent makes the duplicate check and the insert a single atomic step;
  // the previous get-then-put sequence let two racing callers both pass the
  // null check and silently overwrite each other's entry.
  CachedBlock previous = map.putIfAbsent(cacheKey, cb);
  if (previous != null) {
    throw new RuntimeException("Cached an already cached block");
  }
  long newSize = updateSizeMetrics(cb, false);
  elements.incrementAndGet();
  if (newSize > acceptableSize() && !evictionInProgress) {
    runEviction();
  }
}
/**
 * Cache the block with the specified name and buffer, at the default
 * (non-in-memory) priority.
 * <p>
 * Delegates to the three-argument overload with {@code inMemory=false}.
 *
 * @param cacheKey block's cache key
 * @param buf block buffer
 */
@Override
public void cacheBlock(BlockCacheKey cacheKey, Cacheable buf) {
  cacheBlock(cacheKey, buf, false);
}
public void logStats() { // Log size long totalSize = heapSize(); long freeSize = maxSize - totalSize; LruBlockCache.LOG.info("totalSize=" + StringUtils.byteDesc(totalSize) + ", " + "freeSize=" + StringUtils.byteDesc(freeSize) + ", " + "max=" + StringUtils.byteDesc(this.maxSize) + ", " + "blockCount=" + getBlockCount() + ", " + "accesses=" + stats.getRequestCount() + ", " + "hits=" + stats.getHitCount() + ", " + "hitRatio=" + (stats.getHitCount() == 0 ? "0" : (StringUtils.formatPercent(stats.getHitRatio(), 2)+ ", ")) + ", " + "cachingAccesses=" + stats.getRequestCachingCount() + ", " + "cachingHits=" + stats.getHitCachingCount() + ", " + "cachingHitsRatio=" + (stats.getHitCachingCount() == 0 ? "0,": (StringUtils.formatPercent(stats.getHitCachingRatio(), 2) + ", ")) + "evictions=" + stats.getEvictionCount() + ", " + "evicted=" + stats.getEvictedCount() + ", " + "evictedPerRun=" + stats.evictedPerEviction()); }
/**
 * Fetches a block from the tiered cache: the on-heap cache is consulted
 * first; on an off-heap hit the block is optionally promoted back on-heap.
 * Misses are only counted when {@code repeat} is false.
 */
@Override
public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat) {
  Cacheable block = onHeapCache.getBlock(cacheKey, caching, repeat);
  if (block != null) {
    stats.hit(caching);
    return block;
  }
  block = offHeapCache.getBlock(cacheKey, caching, repeat);
  if (block != null) {
    if (caching) {
      // Promote the off-heap hit into the on-heap tier for faster re-reads.
      onHeapCache.cacheBlock(cacheKey, block);
    }
    stats.hit(caching);
    return block;
  }
  if (!repeat) {
    stats.miss(caching);
  }
  return null;
}