/**
 * Used in testing. May be very inefficient.
 *
 * @return the set of cached file names
 */
@VisibleForTesting
SortedSet<String> getCachedFileNamesForTest() {
  // Collect into a TreeSet so the result is sorted and de-duplicated.
  SortedSet<String> names = new TreeSet<>();
  for (BlockCacheKey key : map.keySet()) {
    names.add(key.getHfileName());
  }
  return names;
}
@Override
public int compare(BlockCacheKey a, BlockCacheKey b) {
  // Order primarily by file name, then by offset within the file.
  int nameComparison = a.getHfileName().compareTo(b.getHfileName());
  if (nameComparison != 0) {
    return nameComparison;
  }
  // Long.compare avoids the hand-rolled three-way branch (and any
  // subtraction-overflow temptation) while giving identical ordering.
  return Long.compare(a.getOffset(), b.getOffset());
}
});
/**
 * Evicts all blocks for a specific HFile. This is an
 * expensive operation implemented as a linear-time search through all blocks
 * in the cache. Ideally this should be a search in a log-access-time map.
 *
 * <p>
 * This is used for evict-on-close to remove all blocks of a specific HFile.
 *
 * @return the number of blocks evicted
 */
@Override
public int evictBlocksByHfileName(String hfileName) {
  int evicted = 0;
  for (BlockCacheKey key : map.keySet()) {
    if (!key.getHfileName().equals(hfileName)) {
      continue;
    }
    // Only count evictions that actually happened.
    if (evictBlock(key)) {
      evicted++;
    }
  }
  // Also evict from the victim (secondary) cache, if one is configured.
  if (victimHandler != null) {
    evicted += victimHandler.evictBlocksByHfileName(hfileName);
  }
  return evicted;
}
+ cacheKey.getHfileName() + " @ " + cacheKey.getOffset() + " is " + buf.heapSize()
/**
 * Converts a {@link BlockCacheKey} into its protobuf form for persistence.
 *
 * @param key the in-memory cache key to serialize
 * @return the protobuf representation of {@code key}
 */
private static BucketCacheProtos.BlockCacheKey toPB(BlockCacheKey key) {
  BucketCacheProtos.BlockCacheKey.Builder builder =
      BucketCacheProtos.BlockCacheKey.newBuilder();
  builder.setHfilename(key.getHfileName());
  builder.setOffset(key.getOffset());
  builder.setPrimaryReplicaBlock(key.isPrimary());
  builder.setBlockType(toPB(key.getBlockType()));
  return builder.build();
}
// JUnit convention: expected value first, actual second — otherwise failure
// messages report the values the wrong way around.
assertEquals("cb" + i, queue.pollLast().getCacheKey().getHfileName());
// JUnit convention: expected value first, actual second — otherwise failure
// messages report the values the wrong way around.
assertEquals("cb" + i, queue.pollLast().getCacheKey().getHfileName());
/**
 * Used in testing. May be very inefficient.
 * @return the set of cached file names
 */
SortedSet<String> getCachedFileNamesForTest() {
  // TreeSet keeps the names sorted and unique.
  SortedSet<String> result = new TreeSet<String>();
  for (BlockCacheKey key : map.keySet()) {
    result.add(key.getHfileName());
  }
  return result;
}
/**
 * Evicts every cached block belonging to the given HFile via a linear scan
 * of the backing map.
 *
 * @param hfileName name of the HFile whose blocks should be evicted
 * @return the number of blocks actually evicted
 */
@Override
public int evictBlocksByHfileName(String hfileName) {
  int evictedCount = 0;
  for (BlockCacheKey e : backingMap.keySet()) {
    if (e.getHfileName().equals(hfileName)) {
      // BUG FIX: the original discarded evictBlock's result and never
      // incremented the counter, so this method always returned 0.
      if (this.evictBlock(e)) {
        ++evictedCount;
      }
    }
  }
  return evictedCount;
}
/**
 * Evicts all cached blocks of the named HFile by scanning the backing store.
 *
 * @param hfileName name of the HFile whose blocks should be evicted
 * @return the number of blocks evicted
 */
public int evictBlocksByHfileName(String hfileName) {
  int evicted = 0;
  for (BlockCacheKey key : backingStore.keySet()) {
    if (!key.getHfileName().equals(hfileName)) {
      continue;
    }
    if (evictBlock(key)) {
      ++evicted;
    }
  }
  return evicted;
}
/**
 * Used in testing. May be very inefficient.
 * @return the set of cached file names
 */
@VisibleForTesting
SortedSet<String> getCachedFileNamesForTest() {
  // Sorted, de-duplicated view of every file with at least one cached block.
  SortedSet<String> cachedFiles = new TreeSet<String>();
  for (BlockCacheKey k : map.keySet()) {
    cachedFiles.add(k.getHfileName());
  }
  return cachedFiles;
}
/**
 * Evicts all blocks for a specific HFile. This is an
 * expensive operation implemented as a linear-time search through all blocks
 * in the cache. Ideally this should be a search in a log-access-time map.
 *
 * <p>
 * This is used for evict-on-close to remove all blocks of a specific HFile.
 *
 * @return the number of blocks evicted
 */
@Override
public int evictBlocksByHfileName(String hfileName) {
  int count = 0;
  for (BlockCacheKey candidate : map.keySet()) {
    boolean matches = candidate.getHfileName().equals(hfileName);
    if (matches && evictBlock(candidate)) {
      count++;
    }
  }
  return count;
}
/**
 * Evicts all blocks for a specific HFile. This is an
 * expensive operation implemented as a linear-time search through all blocks
 * in the cache. Ideally this should be a search in a log-access-time map.
 *
 * <p>
 * This is used for evict-on-close to remove all blocks of a specific HFile.
 *
 * @return the number of blocks evicted
 */
@Override
public int evictBlocksByHfileName(String hfileName) {
  int total = 0;
  for (BlockCacheKey candidate : map.keySet()) {
    if (candidate.getHfileName().equals(hfileName) && evictBlock(candidate)) {
      total++;
    }
  }
  // Cascade to the victim cache so its copies are evicted as well.
  if (victimHandler != null) {
    total += victimHandler.evictBlocksByHfileName(hfileName);
  }
  return total;
}
/**
 * Bookkeeping performed after a block has been evicted: returns the bucket
 * slot to the allocator, adjusts the cached-size accounting, and removes the
 * key from the per-file index.
 *
 * @param cacheKey key of the evicted block
 * @param bucketEntry bucket entry that held the block
 * @param decrementBlockNumber whether to decrement the cached-block counter
 */
@VisibleForTesting
void blockEvicted(BlockCacheKey cacheKey, BucketEntry bucketEntry,
    boolean decrementBlockNumber) {
  bucketAllocator.freeBlock(bucketEntry.offset());
  realCacheSize.addAndGet(-bucketEntry.getLength());
  // Drop the key from the per-HFile index used by evict-on-close.
  blocksByHFile.remove(cacheKey.getHfileName(), cacheKey);
  if (decrementBlockNumber) {
    blockNumber.decrementAndGet();
  }
}
// Account for the newly cached block (count + heap size) and index its key
// by file name so evict-on-close can find all blocks of an HFile quickly.
this.blockNumber.incrementAndGet(); this.heapSize.addAndGet(cachedItem.heapSize()); blocksByHFile.put(cacheKey.getHfileName(), cacheKey);
/**
 * Aggregates cached blocks into per-column-family summaries by mapping each
 * cached block's file name back to its store file path on the filesystem.
 *
 * @param conf configuration used to locate the filesystem and root dir
 * @return sorted list of per-column-family cache summaries
 * @throws IOException if the store file path map cannot be built
 */
@Override
public List<BlockCacheColumnFamilySummary> getBlockCacheColumnFamilySummaries(Configuration conf)
    throws IOException {
  // Map of store file name -> full path, built by scanning the root dir.
  Map<String, Path> sfMap = FSUtils.getTableStoreFilePathMap(
      FileSystem.get(conf), FSUtils.getRootDir(conf));
  // quirky, but it's a compound key and this is a shortcut taken instead of
  // creating a class that would represent only a key.
  Map<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary> bcs =
      new HashMap<BlockCacheColumnFamilySummary, BlockCacheColumnFamilySummary>();
  for (CachedBlock cb : map.values()) {
    String sf = cb.getCacheKey().getHfileName();
    Path path = sfMap.get(sf);
    // Blocks whose file is no longer on disk are silently skipped.
    if ( path != null) {
      BlockCacheColumnFamilySummary lookup =
          BlockCacheColumnFamilySummary.createFromStoreFilePath(path);
      BlockCacheColumnFamilySummary bcse = bcs.get(lookup);
      if (bcse == null) {
        // First block seen for this (table, column family): start a summary.
        bcse = BlockCacheColumnFamilySummary.create(lookup);
        bcs.put(lookup,bcse);
      }
      bcse.incrementBlocks();
      bcse.incrementHeapSize(cb.heapSize());
    }
  }
  List<BlockCacheColumnFamilySummary> list =
      new ArrayList<BlockCacheColumnFamilySummary>(bcs.values());
  Collections.sort( list );
  return list;
}
+ cacheKey.getHfileName() + " @ " + cacheKey.getOffset() + " is " + buf.heapSize()
// JUnit convention: expected value first, actual second — otherwise failure
// messages report the values the wrong way around.
assertEquals("cb" + i, queue.pollLast().getCacheKey().getHfileName());
// JUnit convention: expected value first, actual second — otherwise failure
// messages report the values the wrong way around.
assertEquals("cb" + i, queue.pollLast().getCacheKey().getHfileName());