@Override
public ByteString compress(byte[] data, Enum dictIndex) {
  return UnsafeByteOperations.unsafeWrap(data);
}
private static ByteString wrap(ByteBuffer b, int offset, int length) {
  ByteBuffer dup = b.duplicate();
  dup.position(offset);
  dup.limit(offset + length);
  return UnsafeByteOperations.unsafeWrap(dup);
}
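The helper above is the ByteBuffer variant of the same pattern: it positions a duplicate() so the caller's buffer state stays untouched, then hands the window to unsafeWrap, which shares the remaining bytes instead of copying them. A minimal, self-contained sketch of that behaviour, assuming plain (unshaded) protobuf-java on the classpath; the WrapSliceDemo class name and the sample data are illustrative only:

import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;

import com.google.protobuf.ByteString;
import com.google.protobuf.UnsafeByteOperations;

public class WrapSliceDemo {
  public static void main(String[] args) {
    ByteBuffer buf = ByteBuffer.wrap("header-payload-footer".getBytes(StandardCharsets.UTF_8));

    // Same idea as the wrap() helper above: position/limit a duplicate so the
    // caller's buffer is left untouched, then wrap the remaining bytes without copying.
    ByteBuffer dup = buf.duplicate();
    dup.position(7);
    dup.limit(7 + 7);
    ByteString payload = UnsafeByteOperations.unsafeWrap(dup);

    System.out.println(payload.toStringUtf8()); // "payload"
    System.out.println(buf.position());         // 0 - the original buffer state is unchanged
  }
}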
private static List<ClusterStatusProtos.StoreSequenceId> toStoreSequenceId(
    Map<byte[], Long> ids) {
  return ids.entrySet().stream()
      .map(e -> ClusterStatusProtos.StoreSequenceId.newBuilder()
          .setFamilyName(UnsafeByteOperations.unsafeWrap(e.getKey()))
          .setSequenceId(e.getValue())
          .build())
      .collect(Collectors.toList());
}
@Override
public SnapshotRegionManifest.FamilyFiles.Builder familyOpen(
    final SnapshotRegionManifest.Builder region, final byte[] familyName) {
  SnapshotRegionManifest.FamilyFiles.Builder family =
      SnapshotRegionManifest.FamilyFiles.newBuilder();
  family.setFamilyName(UnsafeByteOperations.unsafeWrap(familyName));
  return family;
}
protected static long extractSnapshotSize(byte[] data, int offset, int length)
    throws InvalidProtocolBufferException {
  ByteString byteStr = UnsafeByteOperations.unsafeWrap(data, offset, length);
  return org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuotaSnapshot
      .parseFrom(byteStr).getQuotaUsage();
}
/**
 * @return completed sequence id per store.
 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0.
 *             Use {@link #getStoreSequenceId} instead.
 */
@Deprecated
public List<ClusterStatusProtos.StoreSequenceId> getStoreCompleteSequenceId() {
  return metrics.getStoreSequenceId().entrySet().stream()
      .map(s -> ClusterStatusProtos.StoreSequenceId.newBuilder()
          .setFamilyName(UnsafeByteOperations.unsafeWrap(s.getKey()))
          .setSequenceId(s.getValue())
          .build())
      .collect(Collectors.toList());
}
/**
 * Creates a request for querying the master for the last flushed sequence id of a region.
 * @param regionName the name of the region
 * @return a {@link GetLastFlushedSequenceIdRequest}
 */
public static GetLastFlushedSequenceIdRequest buildGetLastFlushedSequenceIdRequest(
    byte[] regionName) {
  return GetLastFlushedSequenceIdRequest.newBuilder()
      .setRegionName(UnsafeByteOperations.unsafeWrap(regionName))
      .build();
}
public static ComparatorProtos.ByteArrayComparable toByteArrayComparable(final byte[] value) {
  ComparatorProtos.ByteArrayComparable.Builder builder =
      ComparatorProtos.ByteArrayComparable.newBuilder();
  if (value != null) builder.setValue(UnsafeByteOperations.unsafeWrap(value));
  return builder.build();
}
/**
 * Convert a ByteArrayComparable to a protocol buffer Comparator
 *
 * @param comparator the ByteArrayComparable to convert
 * @return the converted protocol buffer Comparator
 */
public static ComparatorProtos.Comparator toComparator(ByteArrayComparable comparator) {
  ComparatorProtos.Comparator.Builder builder = ComparatorProtos.Comparator.newBuilder();
  builder.setName(comparator.getClass().getName());
  builder.setSerializedComparator(UnsafeByteOperations.unsafeWrap(comparator.toByteArray()));
  return builder.build();
}
/**
 * Convert a client Filter to a protocol buffer Filter
 *
 * @param filter the Filter to convert
 * @return the converted protocol buffer Filter
 * @throws IOException if the Filter cannot be serialized
 */
public static FilterProtos.Filter toFilter(Filter filter) throws IOException {
  FilterProtos.Filter.Builder builder = FilterProtos.Filter.newBuilder();
  builder.setName(filter.getClass().getName());
  builder.setSerializedFilter(UnsafeByteOperations.unsafeWrap(filter.toByteArray()));
  return builder.build();
}
public static HBaseProtos.TableName toProtoTableName(TableName tableName) {
  return HBaseProtos.TableName.newBuilder()
      .setNamespace(UnsafeByteOperations.unsafeWrap(tableName.getNamespace()))
      .setQualifier(UnsafeByteOperations.unsafeWrap(tableName.getQualifier()))
      .build();
}
/**
 * Write this instance out to the passed <code>out</code> stream.
 * We write it as a protobuf.
 * @param out the stream to write to
 * @throws IOException if the write fails
 * @see #read(DataInputStream)
 */
void write(final DataOutputStream out) throws IOException {
  HFileProtos.FileInfoProto.Builder builder = HFileProtos.FileInfoProto.newBuilder();
  for (Map.Entry<byte[], byte[]> e : this.map.entrySet()) {
    HBaseProtos.BytesBytesPair.Builder bbpBuilder = HBaseProtos.BytesBytesPair.newBuilder();
    bbpBuilder.setFirst(UnsafeByteOperations.unsafeWrap(e.getKey()));
    bbpBuilder.setSecond(UnsafeByteOperations.unsafeWrap(e.getValue()));
    builder.addMapEntry(bbpBuilder.build());
  }
  out.write(ProtobufMagic.PB_MAGIC);
  builder.build().writeDelimitedTo(out);
}
/**
 * @return The filter serialized using pb
 */
@Override
public byte[] toByteArray() {
  FilterProtos.MultipleColumnPrefixFilter.Builder builder =
      FilterProtos.MultipleColumnPrefixFilter.newBuilder();
  for (byte[] element : sortedPrefixes) {
    if (element != null) builder.addSortedPrefixes(UnsafeByteOperations.unsafeWrap(element));
  }
  return builder.build().toByteArray();
}
/**
 * @return The filter serialized using pb
 */
@Override
public byte[] toByteArray() {
  FilterProtos.PrefixFilter.Builder builder = FilterProtos.PrefixFilter.newBuilder();
  if (this.prefix != null) builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix));
  return builder.build().toByteArray();
}
/**
 * @return The filter serialized using pb
 */
@Override
public byte[] toByteArray() {
  FilterProtos.ColumnRangeFilter.Builder builder = FilterProtos.ColumnRangeFilter.newBuilder();
  if (this.minColumn != null) {
    builder.setMinColumn(UnsafeByteOperations.unsafeWrap(this.minColumn));
  }
  builder.setMinColumnInclusive(this.minColumnInclusive);
  if (this.maxColumn != null) {
    builder.setMaxColumn(UnsafeByteOperations.unsafeWrap(this.maxColumn));
  }
  builder.setMaxColumnInclusive(this.maxColumnInclusive);
  return builder.build().toByteArray();
}
/**
 * @return The filter serialized using pb
 */
@Override
public byte[] toByteArray() {
  FilterProtos.InclusiveStopFilter.Builder builder =
      FilterProtos.InclusiveStopFilter.newBuilder();
  if (this.stopRowKey != null) {
    builder.setStopRowKey(UnsafeByteOperations.unsafeWrap(this.stopRowKey));
  }
  return builder.build().toByteArray();
}
public RegionStoreSequenceIds getLastFlushedSequenceId(byte[] encodedRegionName) {
  RegionStoreSequenceIds.Builder builder = RegionStoreSequenceIds.newBuilder();
  Long seqId = flushedSequenceIdByRegion.get(encodedRegionName);
  builder.setLastFlushedSequenceId(seqId != null ? seqId.longValue() : HConstants.NO_SEQNUM);
  Map<byte[], Long> storeFlushedSequenceId =
      storeFlushedSequenceIdsByRegion.get(encodedRegionName);
  if (storeFlushedSequenceId != null) {
    for (Map.Entry<byte[], Long> entry : storeFlushedSequenceId.entrySet()) {
      builder.addStoreSequenceId(StoreSequenceId.newBuilder()
          .setFamilyName(UnsafeByteOperations.unsafeWrap(entry.getKey()))
          .setSequenceId(entry.getValue().longValue())
          .build());
    }
  }
  return builder.build();
}
/**
 * @return The filter serialized using pb
 */
@Override
public byte[] toByteArray() {
  FilterProtos.ColumnPrefixFilter.Builder builder = FilterProtos.ColumnPrefixFilter.newBuilder();
  if (this.prefix != null) builder.setPrefix(UnsafeByteOperations.unsafeWrap(this.prefix));
  return builder.build().toByteArray();
}
/**
 * @return The filter serialized using pb
 */
@Override
public byte[] toByteArray() {
  FilterProtos.FirstKeyValueMatchingQualifiersFilter.Builder builder =
      FilterProtos.FirstKeyValueMatchingQualifiersFilter.newBuilder();
  for (byte[] qualifier : qualifiers) {
    if (qualifier != null) builder.addQualifiers(UnsafeByteOperations.unsafeWrap(qualifier));
  }
  return builder.build().toByteArray();
}
/**
 * @return A pb instance to represent this instance.
 */
FilterProtos.ColumnValueFilter convert() {
  FilterProtos.ColumnValueFilter.Builder builder = FilterProtos.ColumnValueFilter.newBuilder();
  builder.setFamily(UnsafeByteOperations.unsafeWrap(this.family));
  builder.setQualifier(UnsafeByteOperations.unsafeWrap(this.qualifier));
  builder.setCompareOp(HBaseProtos.CompareType.valueOf(this.op.name()));
  builder.setComparator(ProtobufUtil.toComparator(this.comparator));
  return builder.build();
}
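All of these call sites trade the defensive copy made by ByteString.copyFrom for the zero-copy aliasing of UnsafeByteOperations.unsafeWrap, which is only safe because the wrapped array is not mutated afterwards. A minimal sketch of that difference, again assuming plain (unshaded) protobuf-java on the classpath; the UnsafeWrapDemo class and the sample data are hypothetical:

import java.nio.charset.StandardCharsets;

import com.google.protobuf.ByteString;
import com.google.protobuf.UnsafeByteOperations;

public class UnsafeWrapDemo {
  public static void main(String[] args) {
    byte[] data = "row-key".getBytes(StandardCharsets.UTF_8);

    // copyFrom makes a defensive copy; unsafeWrap aliases the caller's array.
    ByteString copied = ByteString.copyFrom(data);
    ByteString wrapped = UnsafeByteOperations.unsafeWrap(data);

    data[0] = 'X'; // mutate the original array after wrapping

    System.out.println(copied.byteAt(0) == 'X');  // false - the copy is unaffected
    System.out.println(wrapped.byteAt(0) == 'X'); // true  - the wrapper sees the change
  }
}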