reader.getTrailer().getNumDataIndexLevels()); assertTrue(Bytes.equals(keys[0], ((KeyValue)reader.getFirstKey().get()).getKey())); assertTrue(Bytes.equals(keys[NUM_KV - 1], ((KeyValue)reader.getLastKey().get()).getKey())); LOG.info("Last key: " + Bytes.toStringBinary(keys[NUM_KV - 1])); HFileScanner scanner = reader.getScanner(true, pread); for (int i = 0; i < NUM_KV; ++i) { checkSeekTo(keys, scanner, i); HFileBlock.FSReader fsReader = reader2.getUncachedBlockReader(); reader.getTrailer().getLoadOnOpenDataOffset()); HFileBlock block; List<byte[]> blockKeys = new ArrayList<>(); reader.midKey()); reader.getTrailer().getUncompressedDataIndexSize()); reader.close(); reader2.close();
try { hf = HFile.createReader(fs, hfile.getPath(), CacheConfig.DISABLED, true, getConf()); hf.loadFileInfo(); Optional<Cell> startKv = hf.getFirstKey(); start = CellUtil.cloneRow(startKv.get()); Optional<Cell> endKv = hf.getLastKey(); end = CellUtil.cloneRow(endKv.get()); } catch (IOException ioe) { } finally { if (hf != null) { hf.close();
Map<byte[], byte[]> fileInfo = reader.loadFileInfo(); HFileScanner scanner = reader.getScanner(false, false, false); fileStats = new KeyValueStatsCollector(); boolean shouldScanKeysValues = false; out.println(reader.getDataBlockIndexReader()); HFileBlock block; while (offset <= max) { block = reader.readBlock(offset, -1, /* cacheBlock */ false, /* pread */ false, /* isCompaction */ false, /* updateCacheMetrics */ false, null, null); offset += block.getOnDiskSizeWithHeader(); reader.close(); return 0;
CacheConfig cacheConf = new CacheConfig(getConf()); hf = HFile.createReader(fs, hfile.getPath(), cacheConf); hf.loadFileInfo(); KeyValue startKv = KeyValue.createKeyValueFromKey(hf.getFirstKey()); start = startKv.getRow(); KeyValue endKv = KeyValue.createKeyValueFromKey(hf.getLastKey()); end = endKv.getRow(); } catch (IOException ioe) { } finally { if (hf != null) { hf.close();
private void printMeta(HFile.Reader reader, Map<byte[], byte[]> fileInfo) throws IOException { System.out.println("Block index size as per heapsize: " + reader.indexSize()); System.out.println(asSeparateLines(reader.toString())); System.out.println("Trailer:\n " + asSeparateLines(reader.getTrailer().toString())); System.out.println("Fileinfo:"); for (Map.Entry<byte[], byte[]> e : fileInfo.entrySet()) { System.out.println("Mid-key: " + Bytes.toStringBinary(reader.midkey())); } catch (Exception e) { System.out.println ("Unable to retrieve the midkey"); DataInput bloomMeta = reader.getGeneralBloomFilterMetadata(); BloomFilter bloomFilter = null; if (bloomMeta != null) bloomMeta = reader.getDeleteBloomFilterMetadata(); bloomFilter = null; if (bloomMeta != null)
Reader reader = HFile.createReader(fs, path, new CacheConfig(fs.getConf()), fs.getConf()); BlockIndexReader bir = reader.getDataBlockIndexReader(); int blockCount = bir.getRootBlockCount(); reader.close();
reader = HFile.createReader(srcPath.getFileSystem(conf), srcPath, cacheConf); reader.loadFileInfo(); byte[] firstKey = reader.getFirstRowKey(); byte[] lk = reader.getLastKey(); byte[] lastKey = (lk == null) ? null : HFileScanner scanner = reader.getScanner(false, false, false); scanner.seekTo(); do { if (reader != null) reader.close();
private void printMeta(HFile.Reader reader, Map<byte[], byte[]> fileInfo) throws IOException { System.out.println("Block index size as per heapsize: " + reader.indexSize()); System.out.println(asSeparateLines(reader.toString())); System.out.println("Trailer:\n " + asSeparateLines(reader.getTrailer().toString())); System.out.println("Fileinfo:"); for (Map.Entry<byte[], byte[]> e : fileInfo.entrySet()) { System.out.println("Mid-key: " + Bytes.toStringBinary(reader.midkey())); DataInput bloomMeta = reader.getGeneralBloomFilterMetadata(); BloomFilter bloomFilter = null; if (bloomMeta != null) bloomMeta = reader.getDeleteBloomFilterMetadata(); bloomFilter = null; if (bloomMeta != null)
return; // Bloom has been loaded DataInput bloomMeta = reader.getGeneralBloomFilterMetadata(); if (bloomMeta != null) { LOG.trace("Loaded " + bloomFilterType.toString() + " " + generalBloomFilter.getClass().getSimpleName() + " metadata for " + reader.getName()); return; // Bloom has been loaded DataInput bloomMeta = reader.getDeleteBloomFilterMetadata(); if (bloomMeta != null) { deleteFamilyBloomFilter = BloomFilterFactory.createFromMeta( LOG.info("Loaded Delete Family Bloom (" + deleteFamilyBloomFilter.getClass().getSimpleName() + ") metadata for " + reader.getName());
return; // Bloom has been loaded DataInput bloomMeta = reader.getGeneralBloomFilterMetadata(); if (bloomMeta != null) { LOG.info("Loaded " + bloomFilterType.toString() + " (" + generalBloomFilter.getClass().getSimpleName() + ") metadata for " + reader.getName()); return; // Bloom has been loaded DataInput bloomMeta = reader.getDeleteBloomFilterMetadata(); if (bloomMeta != null) { deleteFamilyBloomFilter = BloomFilterFactory.createFromMeta( LOG.info("Loaded Delete Family Bloom (" + deleteFamilyBloomFilter.getClass().getSimpleName() + ") metadata for " + reader.getName());
Reader reader = HFile.createReader(fs, path1, new CacheConfig(fs.getConf()), fs.getConf()); BlockIndexReader bir = reader.getDataBlockIndexReader(); int blockCount = bir.getRootBlockCount(); reader.close();
@Override public boolean seekBefore(Cell key) throws IOException { HFileBlock seekToBlock = reader.getDataBlockIndexReader().seekToDataBlock(key, block, cacheBlocks, pread, isCompaction, ((HFileReaderV2) reader).getEffectiveEncodingInCache(isCompaction)); if (reader.getComparator() .compareOnlyKeyPortion( new KeyValue.KeyOnlyKeyValue(firstKey.array(), firstKey.arrayOffset(), seekToBlock = reader.readBlock(previousBlockOffset, prevBlockSize, cacheBlocks, pread, isCompaction, true, BlockType.DATA, getEffectiveDataBlockEncoding());
Map<byte[], byte[]> fileInfo = reader.loadFileInfo(); HFileScanner scanner = reader.getScanner(false, false, false); fileStats = new KeyValueStatsCollector(); boolean shouldScanKeysValues = false; System.out.println(reader.getDataBlockIndexReader()); reader.close();
closed = false; if (reader.getComparator() instanceof DelegatingSerializedComparator) { loadComparators((DelegatingSerializedComparator) reader.getComparator()); DataInput bin = reader.getGeneralBloomFilterMetadata(); if (bin != null) { final org.apache.hadoop.hbase.util.BloomFilter hbloom = BloomFilterFactory.createFromMeta(bin, reader); if (reader.getComparator() instanceof DelegatingSerializedComparator) { loadComparators((DelegatingSerializedComparator) hbloom.getComparator());
throws IOException { HFileBlock seekToBlock = reader.getDataBlockIndexReader().seekToDataBlock(key, offset, length, block, cacheBlocks, pread, isCompaction); if (seekToBlock == null) { if (reader.getComparator().compare(firstKey.array(), firstKey.arrayOffset(), firstKey.limit(), key, offset, length) == 0) seekToBlock = reader.readBlock(previousBlockOffset, seekToBlock.getOffset() - previousBlockOffset, cacheBlocks, pread, isCompaction, BlockType.DATA);
/** * An internal API function. Seek to the given key, optionally rewinding to * the first key of the block before doing the seek. * * @param key - a cell representing the key that we need to fetch * @param rewind whether to rewind to the first key of the block before * doing the seek. If this is false, we are assuming we never go * back, otherwise the result is undefined. * @return -1 if the key is earlier than the first key of the file, * 0 if we are at the given key, 1 if we are past the given key * -2 if the key is earlier than the first key of the file while * using a faked index key * @throws IOException */ public int seekTo(Cell key, boolean rewind) throws IOException { HFileBlockIndex.BlockIndexReader indexReader = reader.getDataBlockIndexReader(); BlockWithScanInfo blockWithScanInfo = indexReader.loadDataBlockWithScanInfo(key, block, cacheBlocks, pread, isCompaction, getEffectiveDataBlockEncoding()); if (blockWithScanInfo == null || blockWithScanInfo.getHFileBlock() == null) { // This happens if the key e.g. falls before the beginning of the file. return -1; } return loadBlockAndSeekToKey(blockWithScanInfo.getHFileBlock(), blockWithScanInfo.getNextIndexedKey(), rewind, key, false); }
Reader reader = HFile.createReaderFromStream(path, fsdis, fs.getFileStatus(path).getLen(), new CacheConfig(conf), conf); reader.loadFileInfo(); KeySampler kSampler = new KeySampler(rng, ((KeyValue) reader.getFirstKey().get()).getKey(), ((KeyValue) reader.getLastKey().get()).getKey(), keyLenGen); HFileScanner scanner = reader.getScanner(false, USE_PREAD); BytesWritable key = new BytesWritable(); timer.reset();
Configuration conf = TEST_UTIL.getConfiguration(); HFile.Reader reader = HFile.createReader(fs, p, new CacheConfig(conf), true, conf); reader.loadFileInfo(); HFileBlockIndex.BlockIndexReader blockIndexReader = reader.getDataBlockIndexReader(); System.out.println(blockIndexReader.toString()); assertEquals(1, blockIndexReader.rootBlockContainingKey( toKV("l", tagUsage))); reader.close(); deleteTestDir(fs);
/** * An internal API function. Seek to the given key, optionally rewinding to * the first key of the block before doing the seek. * * @param key key byte array * @param offset key offset in the key byte array * @param length key length * @param rewind whether to rewind to the first key of the block before * doing the seek. If this is false, we are assuming we never go * back, otherwise the result is undefined. * @return -1 if the key is earlier than the first key of the file, * 0 if we are at the given key, and 1 if we are past the given key * @throws IOException */ protected int seekTo(byte[] key, int offset, int length, boolean rewind) throws IOException { HFileBlockIndex.BlockIndexReader indexReader = reader.getDataBlockIndexReader(); BlockWithScanInfo blockWithScanInfo = indexReader.loadDataBlockWithScanInfo(key, offset, length, block, cacheBlocks, pread, isCompaction); if (blockWithScanInfo == null || blockWithScanInfo.getHFileBlock() == null) { // This happens if the key e.g. falls before the beginning of the file. return -1; } return loadBlockAndSeekToKey(blockWithScanInfo.getHFileBlock(), blockWithScanInfo.getNextIndexedKey(), rewind, key, offset, length, false); }
/**
 * Smoke-tests a compression codec by writing a single key/value (plus one
 * file-info entry) to an HFile with the given compression, then reading the
 * first key back and verifying it round-trips.
 *
 * @param fs    filesystem to write the test file to
 * @param path  location of the temporary HFile
 * @param codec compression codec name to exercise (e.g. "gz", "lzo")
 * @throws Exception if the write fails or the key read back does not match
 */
public static void doSmokeTest(FileSystem fs, Path path, String codec)
    throws Exception {
  Configuration conf = HBaseConfiguration.create();
  HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
      .withPath(fs, path)
      .withCompression(codec)
      .create();
  try {
    writer.append(Bytes.toBytes("testkey"), Bytes.toBytes("testval"));
    writer.appendFileInfo(Bytes.toBytes("infokey"), Bytes.toBytes("infoval"));
  } finally {
    // Close even if append throws, so the test file is not leaked.
    writer.close();
  }
  HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf));
  byte[] key;
  try {
    reader.loadFileInfo();
    key = reader.getFirstKey();
  } finally {
    // Release the reader before the comparison; the key is already copied out.
    reader.close();
  }
  // Compare with the constant on the left so a null key reports the
  // informative exception below instead of throwing NullPointerException.
  if (!"testkey".equals(Bytes.toString(key))) {
    throw new Exception("Read back incorrect result: "
        + Bytes.toStringBinary(key));
  }
}