@Override
public boolean evictBlock(BlockCacheKey cacheKey) {
  // Evict from both tiers unconditionally; the results are computed before
  // combining so the l2Cache eviction always runs even when the on-heap
  // eviction already succeeded.
  boolean evictedOnHeap = this.onHeapCache.evictBlock(cacheKey);
  boolean evictedL2 = this.l2Cache.evictBlock(cacheKey);
  return evictedOnHeap || evictedL2;
}
}
@Override
public boolean evictBlock(BlockCacheKey cacheKey) {
  // BUG FIX: the original used a short-circuiting ||, so whenever the on-heap
  // cache evicted the block the l2Cache eviction was skipped entirely, leaving
  // a stale copy in L2. Evaluate both evictions eagerly (matching the other
  // combined-cache evictBlock implementation in this file), then combine.
  boolean evictedOnHeap = onHeapCache.evictBlock(cacheKey);
  boolean evictedL2 = l2Cache.evictBlock(cacheKey);
  return evictedOnHeap || evictedL2;
}
@Override
public void doAnAction() throws Exception {
  // Churn one cache slot: drop the entry, immediately re-insert it, then
  // pause for the configured eviction period before the next round.
  toBeTested.evictBlock(key);
  toBeTested.cacheBlock(key, bac);
  Thread.sleep(blockEvictPeriod);
}
};
@Override public void doAnAction() throws Exception { if (!blocksToTest.isEmpty()) { HFileBlockPair ourBlock = blocksToTest.poll(); // if we run out of blocks to test, then we should stop the tests. if (ourBlock == null) { ctx.setStopFlag(true); return; } toBeTested.cacheBlock(ourBlock.blockName, ourBlock.block); Cacheable retrievedBlock = toBeTested.getBlock(ourBlock.blockName, false, false, true); if (retrievedBlock != null) { assertEquals(ourBlock.block, retrievedBlock); toBeTested.evictBlock(ourBlock.blockName); hits.incrementAndGet(); assertNull(toBeTested.getBlock(ourBlock.blockName, false, false, true)); } else { miss.incrementAndGet(); } totalQueries.incrementAndGet(); } } };
@Override public void run() { Scan s = new Scan().withStartRow(ROW4).withStopRow(ROW5).setCaching(1); try { while(!doScan.get()) { try { // Sleep till you start scan Thread.sleep(1); } catch (InterruptedException e) { } } List<BlockCacheKey> cacheList = new ArrayList<>(); Iterator<CachedBlock> iterator = cache.iterator(); // evict all the blocks while (iterator.hasNext()) { CachedBlock next = iterator.next(); BlockCacheKey cacheKey = new BlockCacheKey(next.getFilename(), next.getOffset()); cacheList.add(cacheKey); // evict what ever is available cache.evictBlock(cacheKey); } try (ResultScanner scanner = table.getScanner(s)) { while (scanner.next() != null) { } } compactReadLatch.countDown(); } catch (IOException e) { } } }
/**
 * Removes every block from {@code blockCache}, retrying until the cache
 * reports a block count of zero.
 *
 * @param blockCache the cache to drain
 * @throws InterruptedException if interrupted while sleeping between retries
 */
private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
  if (blockCache instanceof LruBlockCache) {
    // LRU cache supports wholesale clearing directly.
    ((LruBlockCache) blockCache).clearCache();
  } else {
    // BucketCache may not return all cached blocks(blocks in write queue), so check it here.
    for (int clearCount = 0; blockCache.getBlockCount() > 0; clearCount++) {
      if (clearCount > 0) {
        LOG.warn("clear block cache " + blockCache + " " + clearCount + " times, "
            + blockCache.getBlockCount() + " blocks remaining");
        // Give the cache a moment (e.g. to drain its write queue) before retrying.
        Thread.sleep(10);
      }
      // Snapshot the iteration to avoid mutating the cache while iterating it.
      for (CachedBlock block : Lists.newArrayList(blockCache)) {
        BlockCacheKey key = new BlockCacheKey(block.getFilename(), block.getOffset());
        // CombinedBucketCache may need evict two times.
        // Note: evictBlock() runs in the loop condition — keep evicting this key
        // until it reports nothing left to evict.
        for (int evictCount = 0; blockCache.evictBlock(key); evictCount++) {
          if (evictCount > 1) {
            LOG.warn("evict block " + block + " in " + blockCache + " " + evictCount
                + " times, maybe a bug here");
          }
        }
      }
    }
  }
}
// Build the key identifying this cached block, record it, then evict it.
BlockCacheKey cacheKey = new BlockCacheKey(next.getFilename(), next.getOffset());
cacheList.add(cacheKey);
cache.evictBlock(cacheKey);
// Drop this block from the cache.
cache.evictBlock(cacheKey);
/**
 * Verifies that the cache's reported heap size grows when a block is cached
 * and returns to its previous value once that same block is evicted.
 *
 * @param toBeTested cache under test; must also implement {@code HeapSize}
 * @param blockSize size of the generated test block
 */
public static void testHeapSizeChanges(final BlockCache toBeTested, final int blockSize) {
  HFileBlockPair[] pairs = generateHFileBlocks(blockSize, 1);
  long baseline = ((HeapSize) toBeTested).heapSize();
  // Caching something must always increase the reported heap size.
  toBeTested.cacheBlock(pairs[0].blockName, pairs[0].block);
  assertTrue(baseline < ((HeapSize) toBeTested).heapSize());
  // Evicting the same block must restore the original heap size.
  toBeTested.evictBlock(pairs[0].blockName);
  assertEquals(baseline, ((HeapSize) toBeTested).heapSize());
}
/**
 * Loads {@code count} data entries and {@code count} index entries into
 * {@code bc}, producing exactly two cache hits per iteration, asserts the
 * resulting hit count, then evicts the first data/index pair.
 *
 * @param bc the cache to populate
 * @param count number of data (and index) entries to add
 */
private void addDataAndHits(final BlockCache bc, final int count) {
  Cacheable dce = new DataCacheEntry();
  Cacheable ice = new IndexCacheEntry();
  for (int i = 0; i < count; i++) {
    BlockCacheKey bckd = new BlockCacheKey("f", i);
    BlockCacheKey bcki = new BlockCacheKey("f", i + count);
    // First lookup is a miss: nothing is cached under this key yet.
    bc.getBlock(bckd, true, false, true);
    bc.cacheBlock(bckd, dce);
    bc.cacheBlock(bcki, ice);
    // These two lookups are the hits counted in the assertion below.
    bc.getBlock(bckd, true, false, true);
    bc.getBlock(bcki, true, false, true);
  }
  assertEquals(2 * count /* Data and Index blocks */, bc.getStats().getHitCount());
  BlockCacheKey bckd = new BlockCacheKey("f", 0);
  BlockCacheKey bcki = new BlockCacheKey("f", 0 + count);
  bc.evictBlock(bckd);
  bc.evictBlock(bcki);
  // FIX: removed the original trailing "bc.getStats().getEvictedCount();" —
  // its result was discarded, making the statement a no-op. Callers that care
  // about eviction stats read them directly.
}
/** * @param bc The block cache instance. * @param cc Cache config. * @param doubling If true, addition of element ups counter by 2, not 1, because element added * to onheap and offheap caches. * @param sizing True if we should run sizing test (doesn't always apply). */ void basicBlockCacheOps(final BlockCache bc, final CacheConfig cc, final boolean doubling, final boolean sizing) { assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory()); BlockCacheKey bck = new BlockCacheKey("f", 0); Cacheable c = new DataCacheEntry(); // Do asserts on block counting. long initialBlockCount = bc.getBlockCount(); bc.cacheBlock(bck, c, cc.isInMemory()); assertEquals(doubling ? 2 : 1, bc.getBlockCount() - initialBlockCount); bc.evictBlock(bck); assertEquals(initialBlockCount, bc.getBlockCount()); // Do size accounting. Do it after the above 'warm-up' because it looks like some // buffers do lazy allocation so sizes are off on first go around. if (sizing) { long originalSize = bc.getCurrentSize(); bc.cacheBlock(bck, c, cc.isInMemory()); assertTrue(bc.getCurrentSize() > originalSize); bc.evictBlock(bck); long size = bc.getCurrentSize(); assertEquals(originalSize, size); } }
@Override
public boolean evictBlock(BlockCacheKey cacheKey) {
  // BUG FIX: the original short-circuited with ||, so a successful eviction
  // from lruCache skipped the l2Cache eviction and left a stale block in L2.
  // Evaluate both evictions eagerly, then combine the results.
  boolean evictedFromLru = lruCache.evictBlock(cacheKey);
  boolean evictedFromL2 = l2Cache.evictBlock(cacheKey);
  return evictedFromLru || evictedFromL2;
}
@Override
public void doAnAction() throws Exception {
  // One round of churn: evict the entry, put it straight back, and wait out
  // the eviction period before the harness calls us again.
  toBeTested.evictBlock(key);
  toBeTested.cacheBlock(key, bac);
  Thread.sleep(blockEvictPeriod);
}
};
/**
 * Closes this reader, optionally evicting this file's cached blocks first.
 *
 * @param evictOnClose if true (and the block cache is enabled), evict the
 *          blocks at each root-index offset before closing
 * @throws IOException if closing the underlying stream fails
 */
@Override
public void close(boolean evictOnClose) throws IOException {
  if (evictOnClose && cacheConf.isBlockCacheEnabled()) {
    int numEvicted = 0;
    // Evict the block cached at each root-index entry's offset so the cache
    // does not keep entries for a file that is going away.
    for (int i = 0; i < dataBlockIndexReader.getRootBlockCount(); i++) {
      if (cacheConf.getBlockCache().evictBlock(
          new BlockCacheKey(name, dataBlockIndexReader.getRootBlockOffset(i),
              DataBlockEncoding.NONE, BlockType.DATA))) {
        numEvicted++;
      }
    }
    LOG.debug("On close of file " + name + " evicted " + numEvicted
        + " block(s) of " + dataBlockIndexReader.getRootBlockCount()
        + " total blocks");
  }
  // Close the input stream only if this reader owns it; null it out so a
  // double close is harmless.
  if (this.closeIStream && this.istream != null) {
    this.istream.close();
    this.istream = null;
  }
  // Flush any accumulated schema metrics before the reader goes away.
  getSchemaMetrics().flushMetrics();
}
@Override public void doAnAction() throws Exception { if (!blocksToTest.isEmpty()) { HFileBlockPair ourBlock = blocksToTest.poll(); // if we run out of blocks to test, then we should stop the tests. if (ourBlock == null) { ctx.setStopFlag(true); return; } toBeTested.cacheBlock(ourBlock.blockName, ourBlock.block); Cacheable retrievedBlock = toBeTested.getBlock(ourBlock.blockName, false, false, true); if (retrievedBlock != null) { assertEquals(ourBlock.block, retrievedBlock); toBeTested.evictBlock(ourBlock.blockName); hits.incrementAndGet(); assertNull(toBeTested.getBlock(ourBlock.blockName, false, false, true)); } else { miss.incrementAndGet(); } totalQueries.incrementAndGet(); } } };
@Override public void run() { Scan s = new Scan().withStartRow(ROW4).withStopRow(ROW5).setCaching(1); try { while(!doScan.get()) { try { // Sleep till you start scan Thread.sleep(1); } catch (InterruptedException e) { } } List<BlockCacheKey> cacheList = new ArrayList<>(); Iterator<CachedBlock> iterator = cache.iterator(); // evict all the blocks while (iterator.hasNext()) { CachedBlock next = iterator.next(); BlockCacheKey cacheKey = new BlockCacheKey(next.getFilename(), next.getOffset()); cacheList.add(cacheKey); // evict what ever is available cache.evictBlock(cacheKey); } try (ResultScanner scanner = table.getScanner(s)) { while (scanner.next() != null) { } } compactReadLatch.countDown(); } catch (IOException e) { } } }
/**
 * Removes every block from {@code blockCache}, retrying until the cache
 * reports a block count of zero.
 *
 * @param blockCache the cache to drain
 * @throws InterruptedException if interrupted while sleeping between retries
 */
private void clearBlockCache(BlockCache blockCache) throws InterruptedException {
  if (blockCache instanceof LruBlockCache) {
    // LRU cache supports wholesale clearing directly.
    ((LruBlockCache) blockCache).clearCache();
  } else {
    // BucketCache may not return all cached blocks(blocks in write queue), so check it here.
    for (int clearCount = 0; blockCache.getBlockCount() > 0; clearCount++) {
      if (clearCount > 0) {
        LOG.warn("clear block cache " + blockCache + " " + clearCount + " times, "
            + blockCache.getBlockCount() + " blocks remaining");
        // Give the cache a moment (e.g. to drain its write queue) before retrying.
        Thread.sleep(10);
      }
      // Snapshot the iteration to avoid mutating the cache while iterating it.
      for (CachedBlock block : Lists.newArrayList(blockCache)) {
        BlockCacheKey key = new BlockCacheKey(block.getFilename(), block.getOffset());
        // CombinedBucketCache may need evict two times.
        // Note: evictBlock() runs in the loop condition — keep evicting this key
        // until it reports nothing left to evict.
        for (int evictCount = 0; blockCache.evictBlock(key); evictCount++) {
          if (evictCount > 1) {
            LOG.warn("evict block " + block + " in " + blockCache + " " + evictCount
                + " times, maybe a bug here");
          }
        }
      }
    }
  }
}
/**
 * Checks that the cache's heap size increases when a block is cached and
 * shrinks back to its original value after that block is evicted.
 *
 * @param toBeTested cache under test; must also implement {@code HeapSize}
 * @param blockSize size of the generated test block
 */
public static void testHeapSizeChanges(final BlockCache toBeTested, final int blockSize) {
  HFileBlockPair[] generated = generateHFileBlocks(blockSize, 1);
  long sizeBefore = ((HeapSize) toBeTested).heapSize();
  // Heap size must always grow when something is cached.
  toBeTested.cacheBlock(generated[0].blockName, generated[0].block);
  assertTrue(sizeBefore < ((HeapSize) toBeTested).heapSize());
  // After evicting the same block, heap size must be back where it started.
  toBeTested.evictBlock(generated[0].blockName);
  assertEquals(sizeBefore, ((HeapSize) toBeTested).heapSize());
}
/**
 * Loads {@code count} data entries and {@code count} index entries into
 * {@code bc}, producing exactly two cache hits per iteration, asserts the
 * resulting hit count, then evicts the first data/index pair.
 *
 * @param bc the cache to populate
 * @param count number of data (and index) entries to add
 */
private void addDataAndHits(final BlockCache bc, final int count) {
  Cacheable dce = new DataCacheEntry();
  Cacheable ice = new IndexCacheEntry();
  for (int i = 0; i < count; i++) {
    BlockCacheKey bckd = new BlockCacheKey("f", i);
    BlockCacheKey bcki = new BlockCacheKey("f", i + count);
    // First lookup is a miss: nothing is cached under this key yet.
    bc.getBlock(bckd, true, false, true);
    bc.cacheBlock(bckd, dce);
    bc.cacheBlock(bcki, ice);
    // These two lookups are the hits counted in the assertion below.
    bc.getBlock(bckd, true, false, true);
    bc.getBlock(bcki, true, false, true);
  }
  assertEquals(2 * count /* Data and Index blocks */, bc.getStats().getHitCount());
  BlockCacheKey bckd = new BlockCacheKey("f", 0);
  BlockCacheKey bcki = new BlockCacheKey("f", 0 + count);
  bc.evictBlock(bckd);
  bc.evictBlock(bcki);
  // FIX: removed the original trailing "bc.getStats().getEvictedCount();" —
  // its result was discarded, making the statement a no-op. Callers that care
  // about eviction stats read them directly.
}
/** * @param cc * @param doubling If true, addition of element ups counter by 2, not 1, because element added * to onheap and offheap caches. * @param sizing True if we should run sizing test (doesn't always apply). */ void basicBlockCacheOps(final CacheConfig cc, final boolean doubling, final boolean sizing) { assertTrue(cc.isBlockCacheEnabled()); assertTrue(CacheConfig.DEFAULT_IN_MEMORY == cc.isInMemory()); BlockCache bc = cc.getBlockCache(); BlockCacheKey bck = new BlockCacheKey("f", 0); Cacheable c = new DataCacheEntry(); // Do asserts on block counting. long initialBlockCount = bc.getBlockCount(); bc.cacheBlock(bck, c, cc.isInMemory()); assertEquals(doubling? 2: 1, bc.getBlockCount() - initialBlockCount); bc.evictBlock(bck); assertEquals(initialBlockCount, bc.getBlockCount()); // Do size accounting. Do it after the above 'warm-up' because it looks like some // buffers do lazy allocation so sizes are off on first go around. if (sizing) { long originalSize = bc.getCurrentSize(); bc.cacheBlock(bck, c, cc.isInMemory()); assertTrue(bc.getCurrentSize() > originalSize); bc.evictBlock(bck); long size = bc.getCurrentSize(); assertEquals(originalSize, size); } }