/**
 * Creates the on-heap L1 block cache, if one is configured.
 *
 * @param c configuration supplying the cache size and block size
 * @return a new {@link LruBlockCache}, or {@code null} when the configured
 *         on-heap cache size is negative (cache disabled)
 */
private static LruBlockCache createOnHeapCache(final Configuration c) {
  final long onHeapSize = MemorySizeUtil.getOnHeapCacheSize(c);
  if (onHeapSize < 0) {
    // Negative size means no on-heap cache should be created.
    return null;
  }
  final int blockSz = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);
  LOG.info("Allocating onheap LruBlockCache size=" + StringUtils.byteDesc(onHeapSize)
      + ", blockSize=" + StringUtils.byteDesc(blockSz));
  return new LruBlockCache(onHeapSize, blockSz, true, c);
}
/**
 * Builds the set of block cache implementations this test runs against:
 * the default cache from {@link BlockCacheFactory}, an on-heap
 * {@link LruBlockCache}, and a {@link BucketCache} using the "offheap"
 * IO engine.
 *
 * <p>Note: {@code conf} and {@code TEST_UTIL.getConfiguration()} refer to the
 * same Configuration object, so the float set below affects only caches
 * created after that point.
 *
 * @return the list of block caches to exercise
 * @throws IOException if the test data directory cannot be created
 */
private static List<BlockCache> getBlockCaches() throws IOException {
  Configuration conf = TEST_UTIL.getConfiguration();
  List<BlockCache> blockcaches = new ArrayList<>();
  // default
  blockcaches.add(BlockCacheFactory.createBlockCache(conf));
  //set LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME to 2.0f due to HBASE-16287
  TEST_UTIL.getConfiguration().setFloat(LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME,
      2.0f);
  // memory
  BlockCache lru = new LruBlockCache(128 * 1024 * 1024, 64 * 1024, TEST_UTIL.getConfiguration());
  blockcaches.add(lru);
  // bucket cache
  FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir());
  int[] bucketSizes = { INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024, 128 * 1024 };
  BlockCache bucketcache =
      new BucketCache("offheap", 128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100, null);
  blockcaches.add(bucketcache);
  return blockcaches;
}
// NOTE(review): fragment — the enclosing test method begins outside this chunk;
// maxSize and blockSize are defined there.
final LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
EvictionThread evictionThread = cache.getEvictionThread();
// Presumably the two-arg constructor starts an eviction thread by default —
// TODO confirm against the LruBlockCache constructor.
assertTrue(evictionThread != null);
// NOTE(review): fragment — lazyCompressDisabled, lazyCompressEnabled, and maxSize
// are declared outside this chunk.
// With compressed-data-block caching disabled, CacheConfig must report it off.
lazyCompressDisabled.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, false);
CacheConfig cc = new CacheConfig(lazyCompressDisabled,
    new LruBlockCache(maxSize, HConstants.DEFAULT_BLOCKSIZE, false, lazyCompressDisabled));
assertFalse(cc.shouldCacheDataCompressed());
assertTrue(cc.getBlockCache().get() instanceof LruBlockCache);
// With it enabled, CacheConfig must report it on; the cache implementation
// remains an LruBlockCache in both cases.
lazyCompressEnabled.setBoolean(CacheConfig.CACHE_DATA_BLOCKS_COMPRESSED_KEY, true);
cc = new CacheConfig(lazyCompressEnabled,
    new LruBlockCache(maxSize, HConstants.DEFAULT_BLOCKSIZE, false, lazyCompressEnabled));
assertTrue("test improperly configured.", cc.shouldCacheDataCompressed());
assertTrue(cc.getBlockCache().get() instanceof LruBlockCache);
// NOTE(review): fragment — both the preceding constructor call (ending here) and
// the tail of the LruBlockCache constructor arguments continue outside this chunk.
HFileBlock.FILL_HEADER, -1, -1, -1, meta);
// Initial map capacity pre-sized to ~1.2x the expected block count (maxSize/blockSize).
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
    (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR,
// NOTE(review): fragment — the enclosing test method and the definitions of
// maxSize, blockSize, and numBlocks are outside this chunk.
// Sanity-check the sizing helper: numBlocks blocks of blockSize must fit in maxSize.
assertTrue("calculateBlockSize appears broken.", blockSize * numBlocks <= maxSize);
LruBlockCache cache = new LruBlockCache(maxSize,blockSize);
EvictionThread evictionThread = cache.getEvictionThread();
// Presumably the two-arg constructor creates the eviction thread — TODO confirm.
assertTrue(evictionThread != null);
/**
 * Round-trips a block through an LruBlockCache and verifies what comes back:
 * when no data block encoding is configured the cached buffer must equal the
 * source block's; otherwise the cached block must carry the ENCODED_DATA type.
 *
 * @param useTag whether the generated test KeyValues carry tags
 * @throws IOException if sample block creation fails
 */
private void testEncodingWithCacheInternals(boolean useTag) throws IOException {
  List<KeyValue> testKvs = generator.generateTestKeyValues(60, useTag);
  HFileBlock sourceBlock = getSampleHFileBlock(testKvs, useTag);
  HFileBlock diskBlock = createBlockOnDisk(testKvs, sourceBlock, useTag);

  LruBlockCache cache = new LruBlockCache(8 * 1024 * 1024, 32 * 1024);
  BlockCacheKey key = new BlockCacheKey("test", 0);
  cache.cacheBlock(key, diskBlock);

  HeapSize cached = cache.getBlock(key, false, false, true);
  assertTrue(cached instanceof HFileBlock);
  HFileBlock fetched = (HFileBlock) cached;

  if (blockEncoder.getDataBlockEncoding() != DataBlockEncoding.NONE) {
    if (BlockType.ENCODED_DATA != fetched.getBlockType()) {
      // Dump the encoder before the assertion fails, to aid debugging.
      System.out.println(blockEncoder);
    }
    assertEquals(BlockType.ENCODED_DATA, fetched.getBlockType());
  } else {
    // Unencoded path: the buffers must match exactly.
    assertEquals(sourceBlock.getBufferReadOnly(), fetched.getBufferReadOnly());
  }
}
// NOTE(review): fragment — the remaining LruBlockCache constructor arguments
// continue outside this chunk.
long blockSize = calculateBlockSize(maxSize, 10);
// Initial map capacity pre-sized to ~1.2x the expected block count.
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
    (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR,
// NOTE(review): fragment — the enclosing method and maxSize are outside this chunk.
// Size blocks for 101 entries; cache created with default LRU settings (2-arg ctor).
long blockSize = calculateBlockSizeDefault(maxSize, 101);
LruBlockCache cache = new LruBlockCache(maxSize, blockSize);
@Test public void testCacheEvictionSimple() throws Exception { long maxSize = 100000; long blockSize = calculateBlockSizeDefault(maxSize, 10); LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false); CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block"); long expectedCacheSize = cache.heapSize(); // Add all the blocks for (CachedItem block : blocks) { cache.cacheBlock(block.cacheKey, block); expectedCacheSize += block.cacheBlockHeapSize(); } // A single eviction run should have occurred assertEquals(1, cache.getStats().getEvictionCount()); // Our expected size overruns acceptable limit assertTrue(expectedCacheSize > (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR)); // But the cache did not grow beyond max assertTrue(cache.heapSize() < maxSize); // And is still below the acceptable limit assertTrue(cache.heapSize() < (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR)); // All blocks except block 0 should be in the cache assertTrue(cache.getBlock(blocks[0].cacheKey, true, false, true) == null); for(int i=1;i<blocks.length;i++) { assertEquals(cache.getBlock(blocks[i].cacheKey, true, false, true), blocks[i]); } }
// NOTE(review): fragment — the remaining LruBlockCache constructor arguments
// continue outside this chunk.
long blockSize = calculateBlockSize(maxSize, 10);
// Initial map capacity pre-sized to ~1.2x the expected block count.
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
    (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR,
// NOTE(review): fragment — the remaining LruBlockCache constructor arguments
// continue outside this chunk.
// Sized for 31 blocks here (vs. 10 in sibling tests).
long blockSize = calculateBlockSize(maxSize, 31);
// Initial map capacity pre-sized to ~1.2x the expected block count.
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
    (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR,
// NOTE(review): fragment — the enclosing method and maxSize are outside this chunk.
long blockSize = calculateBlockSizeDefault(maxSize, 10);
// Third argument false — presumably disables the background eviction thread;
// TODO confirm against the LruBlockCache constructor.
LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false);
// NOTE(review): fragment — the remaining LruBlockCache constructor arguments
// continue outside this chunk.
long blockSize = calculateBlockSize(maxSize, 10);
// Initial map capacity pre-sized to ~1.2x the expected block count.
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
    (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR,
// NOTE(review): fragment — the remaining LruBlockCache constructor arguments
// continue outside this chunk.
long blockSize = calculateBlockSize(maxSize, 10);
// Initial map capacity pre-sized to ~1.2x the expected block count.
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
    (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR,
/**
 * Creates the L1 cache for the given configuration, if one is configured.
 *
 * @param c Configuration to use.
 * @param mu JMX Memory Bean
 * @return An L1 instance, or {@code null} when the computed LRU cache size is
 *         negative (cache disabled). Currently an instance of LruBlockCache.
 */
private static LruBlockCache getL1(final Configuration c, final MemoryUsage mu) {
  final long lruCacheSize = getLruCacheSize(c, mu);
  if (lruCacheSize < 0) {
    return null;
  }
  final int blockSize = c.getInt(BLOCKCACHE_BLOCKSIZE_KEY, HConstants.DEFAULT_BLOCKSIZE);
  LOG.info("Allocating LruBlockCache size=" + StringUtils.byteDesc(lruCacheSize)
      + ", blockSize=" + StringUtils.byteDesc(blockSize));
  return new LruBlockCache(lruCacheSize, blockSize, true, c);
}
private static List<BlockCache> getBlockCaches() throws IOException { Configuration conf = TEST_UTIL.getConfiguration(); List<BlockCache> blockcaches = new ArrayList<>(); // default CacheConfig.instantiateBlockCache(conf); blockcaches.add(new CacheConfig(conf).getBlockCache()); //set LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME to 2.0f due to HBASE-16287 TEST_UTIL.getConfiguration().setFloat(LruBlockCache.LRU_HARD_CAPACITY_LIMIT_FACTOR_CONFIG_NAME, 2.0f); // memory BlockCache lru = new LruBlockCache(128 * 1024 * 1024, 64 * 1024, TEST_UTIL.getConfiguration()); blockcaches.add(lru); // bucket cache FileSystem.get(conf).mkdirs(TEST_UTIL.getDataTestDir()); int[] bucketSizes = { INDEX_BLOCK_SIZE, DATA_BLOCK_SIZE, BLOOM_BLOCK_SIZE, 64 * 1024, 128 * 1024 }; BlockCache bucketcache = new BucketCache("offheap", 128 * 1024 * 1024, 64 * 1024, bucketSizes, 5, 64 * 100, null); blockcaches.add(bucketcache); return blockcaches; }
/**
 * Round-trips a block through an LruBlockCache and verifies the cached
 * result: when no data block encoding is configured the cached buffer must
 * equal the source block's; otherwise the cached block's type must be
 * ENCODED_DATA.
 *
 * @param useTag whether the generated test KeyValues carry tags
 * @throws IOException if sample block creation fails
 */
private void testEncodingWithCacheInternals(boolean useTag) throws IOException {
  List<KeyValue> kvs = generator.generateTestKeyValues(60, useTag);
  HFileBlock block = getSampleHFileBlock(kvs, useTag);
  HFileBlock cacheBlock = createBlockOnDisk(kvs, block, useTag);
  LruBlockCache blockCache = new LruBlockCache(8 * 1024 * 1024, 32 * 1024);
  BlockCacheKey cacheKey = new BlockCacheKey("test", 0);
  blockCache.cacheBlock(cacheKey, cacheBlock);
  HeapSize heapSize = blockCache.getBlock(cacheKey, false, false, true);
  assertTrue(heapSize instanceof HFileBlock);
  // Removed a stray empty statement (";;") that followed this declaration.
  HFileBlock returnedBlock = (HFileBlock) heapSize;
  if (blockEncoder.getDataBlockEncoding() == DataBlockEncoding.NONE) {
    // Unencoded path: the buffers must match exactly.
    assertEquals(block.getBufferReadOnly(), returnedBlock.getBufferReadOnly());
  } else {
    if (BlockType.ENCODED_DATA != returnedBlock.getBlockType()) {
      // Dump the encoder before the assertion fails, to aid debugging.
      System.out.println(blockEncoder);
    }
    assertEquals(BlockType.ENCODED_DATA, returnedBlock.getBlockType());
  }
}
// NOTE(review): fragment — the remaining LruBlockCache constructor arguments
// continue outside this chunk.
long blockSize = calculateBlockSize(maxSize, 10);
// Initial map capacity pre-sized to ~1.2x the expected block count.
LruBlockCache cache = new LruBlockCache(maxSize, blockSize, false,
    (int)Math.ceil(1.2*maxSize/blockSize), LruBlockCache.DEFAULT_LOAD_FACTOR,
@Test public void testCacheEvictionSimple() throws Exception { long maxSize = 100000; long blockSize = calculateBlockSizeDefault(maxSize, 10); LruBlockCache cache = new LruBlockCache(maxSize,blockSize,false); CachedItem [] blocks = generateFixedBlocks(10, blockSize, "block"); long expectedCacheSize = cache.heapSize(); // Add all the blocks for (CachedItem block : blocks) { cache.cacheBlock(block.cacheKey, block); expectedCacheSize += block.cacheBlockHeapSize(); } // A single eviction run should have occurred assertEquals(1, cache.getStats().getEvictionCount()); // Our expected size overruns acceptable limit assertTrue(expectedCacheSize > (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR)); // But the cache did not grow beyond max assertTrue(cache.heapSize() < maxSize); // And is still below the acceptable limit assertTrue(cache.heapSize() < (maxSize * LruBlockCache.DEFAULT_ACCEPTABLE_FACTOR)); // All blocks except block 0 should be in the cache assertTrue(cache.getBlock(blocks[0].cacheKey, true, false, true) == null); for(int i=1;i<blocks.length;i++) { assertEquals(cache.getBlock(blocks[i].cacheKey, true, false, true), blocks[i]); } }