private int getCurCellSerializedSize() {
  // Base size: the key/value length ints plus key, value, and memstore-ts bytes.
  int size = KEY_VALUE_LEN_SIZE + currKeyLen + currValueLen + currMemstoreTSLen;
  // Tagged files additionally carry a short tags-length field plus the tag bytes.
  if (this.reader.getFileContext().isIncludesTags()) {
    size += Bytes.SIZEOF_SHORT + currTagsLen;
  }
  return size;
}
protected boolean includesTags() {
  // Delegate to the HFile context attached to this decoding context.
  return decodingCtx.getHFileContext().isIncludesTags();
}
ByteBufferUtils.skip(decompressedData, klen + vlen); if (meta.isIncludesTags()) { boolean noTags = true; if (it.hasNext()) {
vlength = in.getInt(); ByteBufferUtils.skip(in, klength + vlength); if (this.meta.isIncludesTags()) { tagsLength = ((in.get() & 0xff) << 8) ^ (in.get() & 0xff); ByteBufferUtils.skip(in, tagsLength);
@Override public ByteBuffer decodeKeyValues(DataInputStream source, HFileBlockDecodingContext blkDecodingCtx) throws IOException { if (blkDecodingCtx.getClass() != HFileBlockDefaultDecodingContext.class) { throw new IOException(this.getClass().getName() + " only accepts " + HFileBlockDefaultDecodingContext.class.getName() + " as the decoding context."); } HFileBlockDefaultDecodingContext decodingCtx = (HFileBlockDefaultDecodingContext) blkDecodingCtx; if (decodingCtx.getHFileContext().isIncludesTags() && decodingCtx.getHFileContext().isCompressTags()) { if (decodingCtx.getTagCompressionContext() != null) { // It will be overhead to create the TagCompressionContext again and again for every block // decoding. decodingCtx.getTagCompressionContext().clear(); } else { try { TagCompressionContext tagCompressionContext = new TagCompressionContext( LRUDictionary.class, Byte.MAX_VALUE); decodingCtx.setTagCompressionContext(tagCompressionContext); } catch (Exception e) { throw new IOException("Failed to initialize TagCompressionContext", e); } } } return internalDecodeKeyValues(source, 0, 0, decodingCtx); }
if (!decodingCtx.getHFileContext().isIncludesTags()) { sourceAsBuffer.position(sourceAsBuffer.limit() - Bytes.SIZEOF_INT); int onDiskSize = sourceAsBuffer.getInt();
protected void finishFileInfo() throws IOException { if (lastCell != null) { // Make a copy. The copy is stuffed into our fileinfo map. Needs a clean // byte buffer. Won't take a tuple. byte [] lastKey = PrivateCellUtil.getCellKeySerializedAsKeyValueKey(this.lastCell); fileInfo.append(FileInfo.LASTKEY, lastKey, false); } // Average key length. int avgKeyLen = entryCount == 0 ? 0 : (int) (totalKeyLength / entryCount); fileInfo.append(FileInfo.AVG_KEY_LEN, Bytes.toBytes(avgKeyLen), false); fileInfo.append(FileInfo.CREATE_TIME_TS, Bytes.toBytes(hFileContext.getFileCreateTime()), false); // Average value length. int avgValueLen = entryCount == 0 ? 0 : (int) (totalValueLength / entryCount); fileInfo.append(FileInfo.AVG_VALUE_LEN, Bytes.toBytes(avgValueLen), false); if (hFileContext.isIncludesTags()) { // When tags are not being written in this file, MAX_TAGS_LEN is excluded // from the FileInfo fileInfo.append(FileInfo.MAX_TAGS_LEN, Bytes.toBytes(this.maxTagsLength), false); boolean tagsCompressed = (hFileContext.getDataBlockEncoding() != DataBlockEncoding.NONE) && hFileContext.isCompressTags(); fileInfo.append(FileInfo.TAGS_COMPRESSED, Bytes.toBytes(tagsCompressed), false); } }
@Override public void startBlockEncoding(HFileBlockEncodingContext blkEncodingCtx, DataOutputStream out) throws IOException { if (blkEncodingCtx.getClass() != HFileBlockDefaultEncodingContext.class) { throw new IOException (this.getClass().getName() + " only accepts " + HFileBlockDefaultEncodingContext.class.getName() + " as the " + "encoding context."); } HFileBlockDefaultEncodingContext encodingCtx = (HFileBlockDefaultEncodingContext) blkEncodingCtx; encodingCtx.prepareEncoding(out); if (encodingCtx.getHFileContext().isIncludesTags() && encodingCtx.getHFileContext().isCompressTags()) { if (encodingCtx.getTagCompressionContext() != null) { // It will be overhead to create the TagCompressionContext again and again for every block // encoding. encodingCtx.getTagCompressionContext().clear(); } else { try { TagCompressionContext tagCompressionContext = new TagCompressionContext( LRUDictionary.class, Byte.MAX_VALUE); encodingCtx.setTagCompressionContext(tagCompressionContext); } catch (Exception e) { throw new IOException("Failed to initialize TagCompressionContext", e); } } } StreamUtils.writeInt(out, 0); // DUMMY length. This will be updated in endBlockEncoding() blkEncodingCtx.setEncodingState(new BufferedDataBlockEncodingState()); }
protected final void afterDecodingKeyValue(DataInputStream source, ByteBuffer dest, HFileBlockDefaultDecodingContext decodingCtx) throws IOException { if (decodingCtx.getHFileContext().isIncludesTags()) { int tagsLength = ByteBufferUtils.readCompressedInt(source);
/**
 * Reads the key length, value length, optional tags length and the mvcc of the cell at the
 * current blockBuffer position, populating currKeyLen, currValueLen and (when the file
 * context includes tags) currTagsLen. Does not advance the buffer's position itself; offsets
 * are tracked manually via {@code p}.
 */
protected void readKeyValueLen() {
  // This is a hot method. We go out of our way to make this method short so it can be
  // inlined and is not too big to compile. We also manage position in ByteBuffer ourselves
  // because it is faster than going via range-checked ByteBuffer methods or going through a
  // byte buffer array a byte at a time.
  // Get a long at a time rather than read two individual ints. In micro-benchmarking, even
  // with the extra bit-fiddling, this is order-of-magnitude faster than getting two ints.
  // Trying to imitate what was done - need to profile if this is better or
  // earlier way is better by doing mark and reset?
  // But ensure that you read long instead of two ints
  long ll = blockBuffer.getLongAfterPosition(0);
  // Read top half as an int of key length and bottom int as value length
  this.currKeyLen = (int)(ll >> Integer.SIZE);
  this.currValueLen = (int)(Bytes.MASK_FOR_LOWER_INT_IN_LONG ^ ll);
  // Sanity-check the decoded lengths before using them as offsets.
  checkKeyValueLen();
  // Move position past the key and value lengths and then beyond the key and value
  int p = (Bytes.SIZEOF_LONG + currKeyLen + currValueLen);
  if (reader.getFileContext().isIncludesTags()) {
    // Tags length is a short.
    this.currTagsLen = blockBuffer.getShortAfterPosition(p);
    checkTagsLen();
    p += (Bytes.SIZEOF_SHORT + currTagsLen);
  }
  // Finally pick up the mvcc/sequence id that trails the cell bytes.
  readMvccVersion(p);
}
public int write(Cell cell) throws IOException { // We write tags seperately because though there is no tag in KV // if the hfilecontext says include tags we need the tags length to be // written int size = KeyValueUtil.oswrite(cell, out, false); // Write the additional tag into the stream if (encodingCtx.getHFileContext().isIncludesTags()) { int tagsLength = cell.getTagsLength(); out.writeShort(tagsLength); if (tagsLength > 0) { PrivateCellUtil.writeTags(out, cell, tagsLength); } size += tagsLength + KeyValue.TAGS_LENGTH_SIZE; } if (encodingCtx.getHFileContext().isIncludesMvcc()) { WritableUtils.writeVLong(out, cell.getSequenceId()); size += WritableUtils.getVIntSize(cell.getSequenceId()); } return size; }
@Test
public void testFlushedFileWithNoTags() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName());
  HTableDescriptor htd = new HTableDescriptor(tableName);
  htd.addFamily(new HColumnDescriptor(fam1));
  HRegionInfo info = new HRegionInfo(tableName, null, null, false);
  Path path = TEST_UTIL.getDataTestDir(getClass().getSimpleName());
  region = HBaseTestingUtility.createRegionAndWAL(info, path, TEST_UTIL.getConfiguration(), htd);
  // Write a single untagged cell and flush it out to a store file.
  Put put = new Put(Bytes.toBytes("a-b-0-0"));
  put.addColumn(fam1, qual1, Bytes.toBytes("c1-value"));
  region.put(put);
  region.flush(true);
  // Every flushed store file must report a tag-free file context.
  HStore store = region.getStore(fam1);
  for (HStoreFile sf : store.getStorefiles()) {
    assertFalse("Tags should not be present ",
        sf.getReader().getHFileReader().getFileContext().isIncludesTags());
  }
}
/**
 * Use this constructor if you want to change a few settings only in another context.
 */
public HFileContextBuilder(final HFileContext hfc) {
  // Checksum / integrity settings.
  this.usesHBaseChecksum = hfc.isUseHBaseChecksum();
  this.checksumType = hfc.getChecksumType();
  this.bytesPerChecksum = hfc.getBytesPerChecksum();
  // Per-cell metadata flags.
  this.includesMvcc = hfc.isIncludesMvcc();
  this.includesTags = hfc.isIncludesTags();
  // Compression and encoding choices.
  this.compression = hfc.getCompression();
  this.compressTags = hfc.isCompressTags();
  this.encoding = hfc.getDataBlockEncoding();
  // Block- and file-level settings.
  this.blocksize = hfc.getBlocksize();
  this.cryptoContext = hfc.getEncryptionContext();
  this.fileCreateTime = hfc.getFileCreateTime();
  this.hfileName = hfc.getHFileName();
}
HFileBlockDefaultEncodingContext encodingCtx) throws IOException { int size = 0; if (encodingCtx.getHFileContext().isIncludesTags()) { int tagsLength = cell.getTagsLength(); ByteBufferUtils.putCompressedInt(out, tagsLength);
@Test
public void testFlushedFileWithVisibilityTags() throws Exception {
  final byte[] qual2 = Bytes.toBytes("qual2");
  TableName tableName = TableName.valueOf(TEST_NAME.getMethodName());
  HTableDescriptor desc = new HTableDescriptor(tableName);
  HColumnDescriptor col = new HColumnDescriptor(fam);
  desc.addFamily(col);
  TEST_UTIL.getAdmin().createTable(desc);
  // Atomically write two cells to the same row carrying different visibility labels.
  try (Table table = TEST_UTIL.getConnection().getTable(tableName)) {
    Put p1 = new Put(row1);
    p1.addColumn(fam, qual, value);
    p1.setCellVisibility(new CellVisibility(CONFIDENTIAL));
    Put p2 = new Put(row1);
    p2.addColumn(fam, qual2, value);
    p2.setCellVisibility(new CellVisibility(SECRET));
    RowMutations rm = new RowMutations(row1);
    rm.add(p1);
    rm.add(p2);
    table.mutateRow(rm);
  }
  TEST_UTIL.getAdmin().flush(tableName);
  // After the flush, every store file must advertise tags in its file context.
  List<HRegion> regions = TEST_UTIL.getHBaseCluster().getRegions(tableName);
  HStore store = regions.get(0).getStore(fam);
  Collection<HStoreFile> storefiles = store.getStorefiles();
  assertTrue(storefiles.size() > 0);
  for (HStoreFile storeFile : storefiles) {
    assertTrue(storeFile.getReader().getHFileReader().getFileContext().isIncludesTags());
  }
}
PrivateCellUtil.compareKeyIgnoresMvcc(reader.getComparator(), key, bufBackedKeyOnlyKv); offsetFromPos += klen + vlen; if (this.reader.getFileContext().isIncludesTags()) { if (reader.getFileContext().isIncludesTags()) { lastKeyValueSize += tlen + Bytes.SIZEOF_SHORT;
KeyValueScanner scanner = reader.getStoreFileScanner(true, true, false, hsf.getMaxMemStoreTS(), 0, false); USE_TAG = reader.getHFileReader().getFileContext().isIncludesTags();
.withCompressTags(fileContext.isCompressTags()) .withIncludesMvcc(fileContext.isIncludesMvcc()) .withIncludesTags(fileContext.isIncludesTags()) .build(); return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(),
@Override
protected int getNextCellStartPosition() {
  int pos = super.getNextCellStartPosition();
  // Tagged cells carry an extra short (the tags length) plus the tag bytes themselves.
  return reader.hfileContext.isIncludesTags() ? pos + Bytes.SIZEOF_SHORT + currTagsLen : pos;
}
protected boolean includesTags() {
  // True when the HFile context backing this decoding context records tags.
  boolean tagged = this.decodingCtx.getHFileContext().isIncludesTags();
  return tagged;
}