/** * <code>repeated .hbase.pb.StoreSequenceId store_sequence_id = 2;</code> */
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreSequenceIdBuilder() {
  // Appends a new element to the repeated store_sequence_id field and returns
  // its builder, initialized from the message's default instance.
  return getStoreSequenceIdFieldBuilder().addBuilder(
      org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance());
}
/**
@java.lang.Override
public int hashCode() {
  // protoc-generated hashCode for StoreSequenceId; do not hand-edit the logic.
  // Memoized result: 0 serves as the "not yet computed" sentinel value.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  // Mix in the message descriptor so distinct message types with equal
  // field values still hash differently.
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasFamilyName()) {
    hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getFamilyName().hashCode();
  }
  if (hasSequenceId()) {
    hash = (37 * hash) + SEQUENCE_ID_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getSequenceId());
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  // Computes (and memoizes) the serialized size before writing; generated
  // protobuf code relies on this call's side effect.
  getSerializedSize();
  // Bit 0x1 of bitField0_ tracks presence of the optional family_name field (tag 1).
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    output.writeBytes(1, familyName_);
  }
  // Bit 0x2 tracks presence of the optional sequence_id field (tag 2).
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    output.writeUInt64(2, sequenceId_);
  }
  // Unknown fields are preserved and re-emitted for forward compatibility.
  getUnknownFields().writeTo(output);
}
// Collect each store's max sequence id, keyed by column-family name.
// Bytes.BYTES_COMPARATOR makes raw byte[] usable as TreeMap keys (byte[]
// has identity-based equals/hashCode, so a plain HashMap would not work).
Map<byte[], Long> maxSeqIdInStores = new TreeMap<byte[], Long>(Bytes.BYTES_COMPARATOR);
for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) {
  maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(), storeSeqId.getSequenceId());
  // NOTE(review): fragment is truncated here — enclosing method and closing
  // braces are not visible in this chunk.
// Copy family-name -> sequence-id pairs from the protobuf message into the
// storeIds map (storeIds is declared outside this fragment — presumably a
// byte[]-comparator map; verify at the declaration site).
List<StoreSequenceId> maxSeqIdInStores = ids.getStoreSequenceIdList();
for (StoreSequenceId id : maxSeqIdInStores) {
  storeIds.put(id.getFamilyName().toByteArray(), id.getSequenceId());
  // NOTE(review): fragment is truncated here — closing braces not visible.
getOrCreateStoreFlushedSequenceId(encodedRegionName);
// For each per-store flushed sequence id reported for this region, look up
// the currently recorded value for that column family.
for (StoreSequenceId storeSeqId : entry.getValue().getStoreCompleteSequenceId()) {
  byte[] family = storeSeqId.getFamilyName().toByteArray();
  existingValue = storeFlushedSequenceId.get(family);
  l = storeSeqId.getSequenceId();
  // Trace logging is guarded to avoid the string concatenation cost when
  // trace level is disabled.
  if (LOG.isTraceEnabled()) {
    LOG.trace(Bytes.toString(encodedRegionName) + ", family=" + Bytes.toString(family) +
    // NOTE(review): fragment is truncated mid-expression — remainder of the
    // trace message and loop body are not visible in this chunk.
/**
 * Serializes a region's flushed sequence ids into a pb-magic-prefixed byte array.
 *
 * @param regionLastFlushedSequenceId the flushed sequence id of a region which is the min of its
 *          store max seq ids; must not be null
 * @param storeSequenceIds column family to sequence Id map; may be null, in which case only the
 *          region-level id is serialized
 * @return Serialized protobuf of <code>RegionSequenceIds</code> with pb magic prefix prepended
 *         suitable for use to filter wal edits in distributedLogReplay mode
 */
public static byte[] regionSequenceIdsToByteArray(final Long regionLastFlushedSequenceId,
    final Map<byte[], Long> storeSequenceIds) {
  // Fail fast with a descriptive message instead of an anonymous NPE at the
  // auto-unboxing in setLastFlushedSequenceId(...) below.
  if (regionLastFlushedSequenceId == null) {
    throw new NullPointerException("regionLastFlushedSequenceId must not be null");
  }
  ClusterStatusProtos.RegionStoreSequenceIds.Builder regionSequenceIdsBuilder =
      ClusterStatusProtos.RegionStoreSequenceIds.newBuilder();
  if (storeSequenceIds != null) {
    // A single store builder is reused across iterations; clear() resets it
    // between entries. Scoped here so it is only created when needed.
    ClusterStatusProtos.StoreSequenceId.Builder storeSequenceIdBuilder =
        ClusterStatusProtos.StoreSequenceId.newBuilder();
    for (Map.Entry<byte[], Long> e : storeSequenceIds.entrySet()) {
      byte[] columnFamilyName = e.getKey();
      Long curSeqId = e.getValue();
      storeSequenceIdBuilder.setFamilyName(ByteStringer.wrap(columnFamilyName));
      storeSequenceIdBuilder.setSequenceId(curSeqId);
      regionSequenceIdsBuilder.addStoreSequenceId(storeSequenceIdBuilder.build());
      storeSequenceIdBuilder.clear();
    }
  }
  regionSequenceIdsBuilder.setLastFlushedSequenceId(regionLastFlushedSequenceId);
  byte[] result = regionSequenceIdsBuilder.build().toByteArray();
  return ProtobufUtil.prependPBMagic(result);
}
/**
 * Builds the pb-magic-prefixed serialized form of a region's sequence ids.
 *
 * @param regionLastFlushedSequenceId the flushed sequence id of a region which is the min of its
 *          store max seq ids
 * @param storeSequenceIds column family to sequence Id map
 * @return Serialized protobuf of <code>RegionSequenceIds</code> with pb magic prefix prepended
 *         suitable for use to filter wal edits in distributedLogReplay mode
 */
public static byte[] regionSequenceIdsToByteArray(final Long regionLastFlushedSequenceId,
    final Map<byte[], Long> storeSequenceIds) {
  ClusterStatusProtos.RegionStoreSequenceIds.Builder idsBuilder =
      ClusterStatusProtos.RegionStoreSequenceIds.newBuilder();
  // One store builder is reused for every map entry; clear() resets it.
  ClusterStatusProtos.StoreSequenceId.Builder storeIdBuilder =
      ClusterStatusProtos.StoreSequenceId.newBuilder();
  if (storeSequenceIds != null) {
    for (Map.Entry<byte[], Long> familyToSeqId : storeSequenceIds.entrySet()) {
      storeIdBuilder.setFamilyName(ByteStringer.wrap(familyToSeqId.getKey()));
      storeIdBuilder.setSequenceId(familyToSeqId.getValue());
      idsBuilder.addStoreSequenceId(storeIdBuilder.build());
      storeIdBuilder.clear();
    }
  }
  idsBuilder.setLastFlushedSequenceId(regionLastFlushedSequenceId);
  return ProtobufUtil.prependPBMagic(idsBuilder.build().toByteArray());
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Value equality for StoreSequenceId: presence flags and, when present,
  // the field values must all match, along with any unknown fields.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId that =
      (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId) obj;
  if (hasFamilyName() != that.hasFamilyName()) {
    return false;
  }
  if (hasFamilyName() && !getFamilyName().equals(that.getFamilyName())) {
    return false;
  }
  if (hasSequenceId() != that.hasSequenceId()) {
    return false;
  }
  if (hasSequenceId() && getSequenceId() != that.getSequenceId()) {
    return false;
  }
  return getUnknownFields().equals(that.getUnknownFields());
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // protoc-generated equals for StoreSequenceId; do not hand-edit the logic.
  if (obj == this) {
    return true;
  }
  // Non-StoreSequenceId arguments fall back to the superclass comparison.
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId other =
      (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId) obj;
  boolean result = true;
  // Fields compare equal only when their presence flags match and, when
  // present, the values themselves are equal.
  result = result && (hasFamilyName() == other.hasFamilyName());
  if (hasFamilyName()) {
    result = result && getFamilyName()
        .equals(other.getFamilyName());
  }
  result = result && (hasSequenceId() == other.hasSequenceId());
  if (hasSequenceId()) {
    result = result && (getSequenceId()
        == other.getSequenceId());
  }
  // Unknown fields participate in equality as well.
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // protoc-generated equals for StoreSequenceId (duplicate of the variant
  // above in this chunk); do not hand-edit the logic.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId other =
      (org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId) obj;
  boolean result = true;
  // Presence flags must match; values are compared only when present.
  result = result && (hasFamilyName() == other.hasFamilyName());
  if (hasFamilyName()) {
    result = result && getFamilyName()
        .equals(other.getFamilyName());
  }
  result = result && (hasSequenceId() == other.hasSequenceId());
  if (hasSequenceId()) {
    result = result && (getSequenceId()
        == other.getSequenceId());
  }
  result = result &&
      getUnknownFields().equals(other.getUnknownFields());
  return result;
}
@java.lang.Override
public int hashCode() {
  // protoc-generated hashCode (duplicate of the variant earlier in this
  // chunk); consistent with equals: same present fields => same hash.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasFamilyName()) {
    hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getFamilyName().hashCode();
  }
  if (hasSequenceId()) {
    hash = (37 * hash) + SEQUENCE_ID_FIELD_NUMBER;
    hash = (53 * hash) + hashLong(getSequenceId());
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  // Cache for subsequent calls; 0 means "not yet computed".
  memoizedHashCode = hash;
  return hash;
}
/** Returns a new {@code Builder} pre-populated with this message's field values. */
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
public int hashCode() {
  // Memoized hash: a stored value of 0 means "not yet computed".
  int h = memoizedHashCode;
  if (h != 0) {
    return h;
  }
  // Seed (19 * 41) mixed with the descriptor, so different message types
  // with identical field values hash differently.
  h = (19 * 41) + getDescriptorForType().hashCode();
  if (hasFamilyName()) {
    h = (37 * h) + FAMILY_NAME_FIELD_NUMBER;
    h = (53 * h) + getFamilyName().hashCode();
  }
  if (hasSequenceId()) {
    h = (37 * h) + SEQUENCE_ID_FIELD_NUMBER;
    h = (53 * h) + hashLong(getSequenceId());
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId buildPartial() {
  // Builds the message without checking required-field initialization
  // (hence "partial"); protoc-generated, do not hand-edit.
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId result =
      new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  // Copy the presence bit for family_name (bit 0x1) from builder to message.
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.familyName_ = familyName_;
  // Copy the presence bit for sequence_id (bit 0x2).
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.sequenceId_ = sequenceId_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
/** Returns a new {@code Builder} initialized from this message (protoc-generated). */
public Builder toBuilder() { return newBuilder(this); }
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId buildPartial() {
  // Construct the message without enforcing required-field checks
  // (hence "partial"); field values come straight from this builder.
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId built =
      new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId(this);
  final int from = bitField0_;
  int to = 0;
  // Carry the presence bit for family_name (bit 0x1) into the message.
  if ((from & 0x00000001) == 0x00000001) {
    to |= 0x00000001;
  }
  built.familyName_ = familyName_;
  // Carry the presence bit for sequence_id (bit 0x2).
  if ((from & 0x00000002) == 0x00000002) {
    to |= 0x00000002;
  }
  built.sequenceId_ = sequenceId_;
  built.bitField0_ = to;
  onBuilt();
  return built;
}
public int getSerializedSize() {
  // protoc-generated size computation, memoized in memoizedSerializedSize
  // (-1 = not yet computed). Note the shaded protobuf package
  // (com.facebook.presto.hbase.$internal...) — this copy lives in a
  // relocated/shaded build of the generated code.
  int size = memoizedSerializedSize;
  if (size != -1) return size;

  size = 0;
  // Optional family_name (tag 1), present when bit 0x1 is set.
  if (((bitField0_ & 0x00000001) == 0x00000001)) {
    size += com.facebook.presto.hbase.$internal.com.google.protobuf.CodedOutputStream
      .computeBytesSize(1, familyName_);
  }
  // Optional sequence_id (tag 2), present when bit 0x2 is set.
  if (((bitField0_ & 0x00000002) == 0x00000002)) {
    size += com.facebook.presto.hbase.$internal.com.google.protobuf.CodedOutputStream
      .computeUInt64Size(2, sequenceId_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSerializedSize = size;
  return size;
}
public org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId buildPartial() {
  // protoc-generated (duplicate of the variant earlier in this chunk):
  // builds the message without required-field validation.
  org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId result =
      new org.apache.hadoop.hbase.protobuf.generated.ClusterStatusProtos.StoreSequenceId(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  // Transfer presence bits from the builder to the built message.
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  result.familyName_ = familyName_;
  if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
    to_bitField0_ |= 0x00000002;
  }
  result.sequenceId_ = sequenceId_;
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
/**
 * Builds the flushed-sequence-id snapshot for one region: the region-level id
 * plus one entry per store (column family) that has a recorded flushed id.
 *
 * @param encodedRegionName the region's encoded name, used as the map key
 * @return a {@code RegionStoreSequenceIds} message; the region-level id is
 *         {@code HConstants.NO_SEQNUM} when nothing has been recorded yet
 */
public RegionStoreSequenceIds getLastFlushedSequenceId(byte[] encodedRegionName) {
  RegionStoreSequenceIds.Builder snapshot = RegionStoreSequenceIds.newBuilder();
  // Region-level flushed id; fall back to NO_SEQNUM when absent.
  Long regionSeqId = flushedSequenceIdByRegion.get(encodedRegionName);
  if (regionSeqId != null) {
    snapshot.setLastFlushedSequenceId(regionSeqId.longValue());
  } else {
    snapshot.setLastFlushedSequenceId(HConstants.NO_SEQNUM);
  }
  // Per-store flushed ids, when any have been recorded for this region.
  Map<byte[], Long> perStore = storeFlushedSequenceIdsByRegion.get(encodedRegionName);
  if (perStore != null) {
    for (Map.Entry<byte[], Long> store : perStore.entrySet()) {
      StoreSequenceId.Builder storeId = StoreSequenceId.newBuilder();
      storeId.setFamilyName(ByteString.copyFrom(store.getKey()));
      storeId.setSequenceId(store.getValue().longValue());
      snapshot.addStoreSequenceId(storeId.build());
    }
  }
  return snapshot.build();
}