ClusterStatusProtos$StoreSequenceId

How to use ClusterStatusProtos$StoreSequenceId in org.apache.hadoop.hbase.shaded.protobuf.generated

Best Java code snippets using org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos$StoreSequenceId (Showing top 20 results out of 315)
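Before the collected snippets, here is a minimal sketch of building and reading a StoreSequenceId. It only uses calls that appear in the snippets below; the import path for the shaded UnsafeByteOperations is assumed from the org.apache.hbase.thirdparty package used elsewhere on this page, and the family name "cf" and sequence id 42 are illustrative values.

import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
import org.apache.hadoop.hbase.util.Bytes;
// assumed shaded location, matching Internal.hashLong usage in the generated code below
import org.apache.hbase.thirdparty.com.google.protobuf.UnsafeByteOperations;

public class StoreSequenceIdExample {
 public static void main(String[] args) {
  // Build a StoreSequenceId for column family "cf" flushed up to sequence id 42.
  ClusterStatusProtos.StoreSequenceId storeSeqId =
    ClusterStatusProtos.StoreSequenceId.newBuilder()
      .setFamilyName(UnsafeByteOperations.unsafeWrap(Bytes.toBytes("cf")))
      .setSequenceId(42L)
      .build();

  // Read the two required fields back out, the same way the HBase snippets below do.
  Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
  maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(),
    storeSeqId.getSequenceId());
  System.out.println(Bytes.toString(storeSeqId.getFamilyName().toByteArray())
    + " -> " + storeSeqId.getSequenceId());
 }
}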

origin: apache/hbase

// Rebuild the per-column-family max sequence ids from the protobuf list.
Map<byte[], Long> maxSeqIdInStores = new TreeMap<>(Bytes.BYTES_COMPARATOR);
for (StoreSequenceId storeSeqId : ids.getStoreSequenceIdList()) {
 maxSeqIdInStores.put(storeSeqId.getFamilyName().toByteArray(),
  storeSeqId.getSequenceId());
}
origin: org.apache.hbase/hbase-client

private static List<ClusterStatusProtos.StoreSequenceId> toStoreSequenceId(
  Map<byte[], Long> ids) {
 return ids.entrySet().stream()
   .map(e -> ClusterStatusProtos.StoreSequenceId.newBuilder()
    .setFamilyName(UnsafeByteOperations.unsafeWrap(e.getKey()))
    .setSequenceId(e.getValue())
    .build())
   .collect(Collectors.toList());
}
origin: apache/hbase

@java.lang.Override
public int hashCode() {
 if (memoizedHashCode != 0) {
  return memoizedHashCode;
 }
 int hash = 41;
 hash = (19 * hash) + getDescriptor().hashCode();
 if (hasFamilyName()) {
  hash = (37 * hash) + FAMILY_NAME_FIELD_NUMBER;
  hash = (53 * hash) + getFamilyName().hashCode();
 }
 if (hasSequenceId()) {
  hash = (37 * hash) + SEQUENCE_ID_FIELD_NUMBER;
  hash = (53 * hash) + org.apache.hbase.thirdparty.com.google.protobuf.Internal.hashLong(
    getSequenceId());
 }
 hash = (29 * hash) + unknownFields.hashCode();
 memoizedHashCode = hash;
 return hash;
}
origin: org.apache.hbase/hbase-server

assertEquals(HConstants.NO_SEQNUM, ids.getLastFlushedSequenceId());
long storeSequenceId = ids.getStoreSequenceId(0).getSequenceId();
assertTrue(storeSequenceId > 0);
testUtil.getAdmin().flush(tableName);
// ids is presumably re-read from the master after the flush before the assertions below.
assertTrue(ids.getLastFlushedSequenceId() + " > " + storeSequenceId,
 ids.getLastFlushedSequenceId() > storeSequenceId);
assertEquals(ids.getLastFlushedSequenceId(), ids.getStoreSequenceId(0).getSequenceId());
table.close();
origin: com.aliyun.hbase/alihbase-client

public static RegionMetrics toRegionMetrics(ClusterStatusProtos.RegionLoad regionLoadPB) {
 return RegionMetricsBuilder
   .newBuilder(regionLoadPB.getRegionSpecifier().getValue().toByteArray())
   .setBloomFilterSize(new Size(regionLoadPB.getTotalStaticBloomSizeKB(), Size.Unit.KILOBYTE))
   .setCompactedCellCount(regionLoadPB.getCurrentCompactedKVs())
   .setCompactingCellCount(regionLoadPB.getTotalCompactingKVs())
   .setCompletedSequenceId(regionLoadPB.getCompleteSequenceId())
   .setDataLocality(regionLoadPB.hasDataLocality() ? regionLoadPB.getDataLocality() : 0.0f)
   .setFilteredReadRequestCount(regionLoadPB.getFilteredReadRequestsCount())
   .setStoreFileUncompressedDataIndexSize(new Size(regionLoadPB.getTotalStaticIndexSizeKB(),
    Size.Unit.KILOBYTE))
   .setLastMajorCompactionTimestamp(regionLoadPB.getLastMajorCompactionTs())
   .setMemStoreSize(new Size(regionLoadPB.getMemStoreSizeMB(), Size.Unit.MEGABYTE))
   .setReadRequestCount(regionLoadPB.getReadRequestsCount())
   .setWriteRequestCount(regionLoadPB.getWriteRequestsCount())
   .setStoreFileIndexSize(new Size(regionLoadPB.getStorefileIndexSizeKB(),
    Size.Unit.KILOBYTE))
   .setStoreFileRootLevelIndexSize(new Size(regionLoadPB.getRootIndexSizeKB(),
    Size.Unit.KILOBYTE))
   .setStoreCount(regionLoadPB.getStores())
   .setStoreFileCount(regionLoadPB.getStorefiles())
   .setStoreFileSize(new Size(regionLoadPB.getStorefileSizeMB(), Size.Unit.MEGABYTE))
   .setStoreSequenceIds(regionLoadPB.getStoreCompleteSequenceIdList().stream()
    .collect(Collectors.toMap(
     (ClusterStatusProtos.StoreSequenceId s) -> s.getFamilyName().toByteArray(),
      ClusterStatusProtos.StoreSequenceId::getSequenceId)))
   .setUncompressedStoreFileSize(
    new Size(regionLoadPB.getStoreUncompressedSizeMB(), Size.Unit.MEGABYTE))
   .build();
}
origin: org.apache.hbase/hbase-protocol-shaded

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
 if (obj == this) {
  return true;
 }
 if (!(obj instanceof org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId)) {
  return super.equals(obj);
 }
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId other = (org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId) obj;
 boolean result = true;
 result = result && (hasFamilyName() == other.hasFamilyName());
 if (hasFamilyName()) {
  result = result && getFamilyName()
    .equals(other.getFamilyName());
 }
 result = result && (hasSequenceId() == other.hasSequenceId());
 if (hasSequenceId()) {
  result = result && (getSequenceId()
    == other.getSequenceId());
 }
 result = result && unknownFields.equals(other.unknownFields);
 return result;
}
origin: org.apache.hbase/hbase-protocol-shaded

public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
 return DEFAULT_INSTANCE.toBuilder();
}
origin: org.apache.hbase/hbase-protocol-shaded

public final boolean isInitialized() {
 byte isInitialized = memoizedIsInitialized;
 if (isInitialized == 1) return true;
 if (isInitialized == 0) return false;
 if (!hasFamilyName()) {
  memoizedIsInitialized = 0;
  return false;
 }
 if (!hasSequenceId()) {
  memoizedIsInitialized = 0;
  return false;
 }
 memoizedIsInitialized = 1;
 return true;
}
origin: org.apache.hbase/hbase-protocol-shaded

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId buildPartial() {
 org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId result = new org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId(this);
 int from_bitField0_ = bitField0_;
 int to_bitField0_ = 0;
 if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
  to_bitField0_ |= 0x00000001;
 }
 result.familyName_ = familyName_;
 if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
  to_bitField0_ |= 0x00000002;
 }
 result.sequenceId_ = sequenceId_;
 result.bitField0_ = to_bitField0_;
 onBuilt();
 return result;
}
origin: org.apache.hbase/hbase-protocol-shaded

public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId getDefaultInstanceForType() {
 return org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance();
}
origin: com.aliyun.hbase/alihbase-client

/**
 * @return completed sequence id per store.
 * @deprecated As of release 2.0.0, this will be removed in HBase 3.0.0
 *             Use {@link #getStoreSequenceId} instead.
 */
@Deprecated
public List<ClusterStatusProtos.StoreSequenceId> getStoreCompleteSequenceId() {
 return metrics.getStoreSequenceId().entrySet().stream()
   .map(s -> ClusterStatusProtos.StoreSequenceId.newBuilder()
        .setFamilyName(UnsafeByteOperations.unsafeWrap(s.getKey()))
        .setSequenceId(s.getValue())
        .build())
   .collect(Collectors.toList());
}
origin: org.apache.hbase/hbase-protocol-shaded

/**
 * <pre>
 ** the most recent sequence Id of store from cache flush 
 * </pre>
 *
 * <code>repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreCompleteSequenceIdBuilder() {
 return getStoreCompleteSequenceIdFieldBuilder().addBuilder(
   org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance());
}
origin: org.apache.hbase/hbase-protocol-shaded

/**
 * <code>repeated .hbase.pb.StoreSequenceId store_sequence_id = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreSequenceIdBuilder() {
 return getStoreSequenceIdFieldBuilder().addBuilder(
   org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance());
}
origin: org.apache.hbase/hbase-protocol-shaded

/**
 * <pre>
 ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
 * </pre>
 *
 * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreLastFlushedSequenceIdBuilder() {
 return getStoreLastFlushedSequenceIdFieldBuilder().addBuilder(
   org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance());
}
origin: org.apache.hbase/hbase-protocol-shaded

/**
 * <pre>
 ** the last WAL sequence id flushed from MemStore to HFile for stores of the region 
 * </pre>
 *
 * <code>repeated .hbase.pb.StoreSequenceId store_last_flushed_sequence_id = 2;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreLastFlushedSequenceIdBuilder(
  int index) {
 return getStoreLastFlushedSequenceIdFieldBuilder().addBuilder(
   index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance());
}
origin: org.apache.hbase/hbase-protocol-shaded

public Builder mergeFrom(org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId other) {
 if (other == org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance()) return this;
 if (other.hasFamilyName()) {
  setFamilyName(other.getFamilyName());
 }
 if (other.hasSequenceId()) {
  setSequenceId(other.getSequenceId());
 }
 this.mergeUnknownFields(other.unknownFields);
 onChanged();
 return this;
}
origin: org.apache.hbase/hbase-protocol-shaded

/**
 * <pre>
 ** the most recent sequence Id of store from cache flush 
 * </pre>
 *
 * <code>repeated .hbase.pb.StoreSequenceId store_complete_sequence_id = 18;</code>
 */
public org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.Builder addStoreCompleteSequenceIdBuilder(
  int index) {
 return getStoreCompleteSequenceIdFieldBuilder().addBuilder(
   index, org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId.getDefaultInstance());
}
origin: org.apache.hbase/hbase-protocol-shaded

/**
 * <code>required bytes family_name = 1;</code>
 */
public Builder clearFamilyName() {
 bitField0_ = (bitField0_ & ~0x00000001);
 familyName_ = getDefaultInstance().getFamilyName();
 onChanged();
 return this;
}
org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos$StoreSequenceId

Javadoc

sequence Id of a store
Protobuf type hbase.pb.StoreSequenceId
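Like any generated protobuf message, the type can be serialized and parsed back. The sketch below is illustrative and assumes only the standard generated-message API (toByteArray, parseFrom) on the shaded class; the family name "info" and sequence id 100 are made up.

import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;
import org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException;

public class StoreSequenceIdRoundTrip {
 public static void main(String[] args) throws InvalidProtocolBufferException {
  StoreSequenceId original = StoreSequenceId.newBuilder()
    .setFamilyName(ByteString.copyFromUtf8("info")) // illustrative family name
    .setSequenceId(100L)
    .build();

  // Serialize to bytes and parse back; both methods come from the generated Message API.
  byte[] wire = original.toByteArray();
  StoreSequenceId parsed = StoreSequenceId.parseFrom(wire);

  System.out.println(parsed.equals(original)); // true: both required fields round-trip
 }
}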

Most used methods (see the usage sketch after this list)

  • getFamilyName
    required bytes family_name = 1;
  • newBuilder
  • getSequenceId
    required uint64 sequence_id = 2;
  • <init>
  • getDefaultInstance
  • getDescriptor
  • hasFamilyName
    required bytes family_name = 1;
  • hasSequenceId
    required uint64 sequence_id = 2;
  • isInitialized
  • makeExtensionsImmutable
  • parseUnknownField
  • toBuilder
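A short sketch tying the listed accessors together: because both fields are required, hasFamilyName()/hasSequenceId() and isInitialized() report whether a built message is usable. This is illustrative only; buildPartial() is the standard generated-builder way to obtain an incomplete message without an exception, and the values shown are made up.

import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

public class StoreSequenceIdChecks {
 public static void main(String[] args) {
  // buildPartial() skips the required-field check, so we can inspect an incomplete message.
  StoreSequenceId partial = StoreSequenceId.newBuilder()
    .setSequenceId(7L)
    .buildPartial();

  System.out.println(partial.hasFamilyName()); // false: required bytes family_name = 1 is unset
  System.out.println(partial.hasSequenceId()); // true: required uint64 sequence_id = 2 is set
  System.out.println(partial.isInitialized()); // false: a required field is missing

  StoreSequenceId complete = partial.toBuilder()
    .setFamilyName(ByteString.copyFromUtf8("cf")) // illustrative family name
    .build();
  System.out.println(complete.isInitialized()); // true
 }
}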
