Tabnine Logo
WALProtos$WALKey
Code IndexAdd Tabnine to your IDE (free)

How to use
WALProtos$WALKey
in
org.apache.hadoop.hbase.protobuf.generated

Best Java code snippets using org.apache.hadoop.hbase.protobuf.generated.WALProtos$WALKey (Showing top 20 results out of 315)

origin: apache/hbase

public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) {
 if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this;
 if (other.hasEncodedRegionName()) {
  setEncodedRegionName(other.getEncodedRegionName());
 if (other.hasTableName()) {
  setTableName(other.getTableName());
 if (other.hasLogSequenceNumber()) {
  setLogSequenceNumber(other.getLogSequenceNumber());
 if (other.hasWriteTime()) {
  setWriteTime(other.getWriteTime());
 if (other.hasClusterId()) {
  mergeClusterId(other.getClusterId());
 if (other.hasFollowingKvCount()) {
  setFollowingKvCount(other.getFollowingKvCount());
 if (other.hasNonceGroup()) {
  setNonceGroup(other.getNonceGroup());
 if (other.hasNonce()) {
  setNonce(other.getNonce());
 if (other.hasOrigSequenceNumber()) {
  setOrigSequenceNumber(other.getOrigSequenceNumber());
 this.mergeUnknownFields(other.getUnknownFields());
origin: apache/hbase

public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
 org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
 int from_bitField0_ = bitField0_;
 int to_bitField0_ = 0;
origin: apache/hbase

@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // An object is always equal to itself.
  if (obj == this) {
    return true;
  }
  // Anything that is not a WALEntry is delegated to the superclass check.
  if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry that =
      (org.apache.hadoop.hbase.protobuf.generated.AdminProtos.WALEntry) obj;
  // The optional key field must agree in presence, and in value when present.
  if (hasKey() != that.hasKey()) {
    return false;
  }
  if (hasKey() && !getKey().equals(that.getKey())) {
    return false;
  }
  // The repeated key_value_bytes list must match element-wise.
  if (!getKeyValueBytesList().equals(that.getKeyValueBytesList())) {
    return false;
  }
  // The optional associated_cell_count field: presence first, then value.
  if (hasAssociatedCellCount() != that.hasAssociatedCellCount()) {
    return false;
  }
  if (hasAssociatedCellCount()
      && getAssociatedCellCount() != that.getAssociatedCellCount()) {
    return false;
  }
  // Unknown fields participate in message equality as well.
  return getUnknownFields().equals(that.getUnknownFields());
}
origin: harbby/presto-connectors

ByteString regionName = entries.get(0).getKey().getEncodedRegionName();
Region region = regionServer.getRegionByEncodedName(regionName.toStringUtf8());
RegionCoprocessorHost coprocessorHost =
 if (!regionName.equals(entry.getKey().getEncodedRegionName())) {
  throw new NotServingRegionException("Replay request contains entries from multiple " +
    "regions. First region:" + regionName.toStringUtf8() + " , other region:"
    + entry.getKey().getEncodedRegionName());
  long nonceGroup = entry.getKey().hasNonceGroup()
   ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE;
  long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE;
  regionServer.nonceManager.reportOperationFromWal(
    nonceGroup,
    nonce,
    entry.getKey().getWriteTime());
  long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ?
   entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber();
  OperationStatus[] result = doReplayBatchOp(region, edits, replaySeqId);
origin: com.aliyun.hbase/alihbase-protocol

public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) {
 if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this;
 if (other.hasEncodedRegionName()) {
  setEncodedRegionName(other.getEncodedRegionName());
 if (other.hasTableName()) {
  setTableName(other.getTableName());
 if (other.hasLogSequenceNumber()) {
  setLogSequenceNumber(other.getLogSequenceNumber());
 if (other.hasWriteTime()) {
  setWriteTime(other.getWriteTime());
 if (other.hasClusterId()) {
  mergeClusterId(other.getClusterId());
 if (other.hasFollowingKvCount()) {
  setFollowingKvCount(other.getFollowingKvCount());
 if (other.hasNonceGroup()) {
  setNonceGroup(other.getNonceGroup());
 if (other.hasNonce()) {
  setNonce(other.getNonce());
 if (other.hasOrigSequenceNumber()) {
  setOrigSequenceNumber(other.getOrigSequenceNumber());
 this.mergeUnknownFields(other.getUnknownFields());
origin: harbby/presto-connectors

long replaySeqId = (entry.getKey().hasOrigSequenceNumber()) ?
 entry.getKey().getOrigSequenceNumber() : entry.getKey().getLogSequenceNumber();
int count = entry.getAssociatedCellCount();
List<MutationReplay> mutations = new ArrayList<MutationReplay>();
   m = new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
   long nonceGroup = entry.getKey().hasNonceGroup()
     ? entry.getKey().getNonceGroup() : HConstants.NO_NONCE;
   long nonce = entry.getKey().hasNonce() ? entry.getKey().getNonce() : HConstants.NO_NONCE;
   mutations.add(new MutationReplay(MutationType.PUT, m, nonceGroup, nonce));
 List<UUID> clusterIds = new ArrayList<UUID>(walKeyProto.getClusterIdsCount());
 for (HBaseProtos.UUID uuid : entry.getKey().getClusterIdsList()) {
  clusterIds.add(new UUID(uuid.getMostSigBits(), uuid.getLeastSigBits()));
 key = new HLogKey(walKeyProto.getEncodedRegionName().toByteArray(), TableName.valueOf(
     walKeyProto.getTableName().toByteArray()), replaySeqId, walKeyProto.getWriteTime(),
     clusterIds, walKeyProto.getNonceGroup(), walKeyProto.getNonce(), null);
 logEntry.setFirst(key);
 logEntry.setSecond(val);
origin: harbby/presto-connectors

public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) {
 if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this;
 if (other.hasEncodedRegionName()) {
  setEncodedRegionName(other.getEncodedRegionName());
 if (other.hasTableName()) {
  setTableName(other.getTableName());
 if (other.hasLogSequenceNumber()) {
  setLogSequenceNumber(other.getLogSequenceNumber());
 if (other.hasWriteTime()) {
  setWriteTime(other.getWriteTime());
 if (other.hasClusterId()) {
  mergeClusterId(other.getClusterId());
 if (other.hasFollowingKvCount()) {
  setFollowingKvCount(other.getFollowingKvCount());
 if (other.hasNonceGroup()) {
  setNonceGroup(other.getNonceGroup());
 if (other.hasNonce()) {
  setNonce(other.getNonce());
 if (other.hasOrigSequenceNumber()) {
  setOrigSequenceNumber(other.getOrigSequenceNumber());
 this.mergeUnknownFields(other.getUnknownFields());
origin: harbby/presto-connectors

public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
 org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
 int from_bitField0_ = bitField0_;
 int to_bitField0_ = 0;
origin: org.apache.hbase/hbase-protocol

public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey other) {
 if (other == org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey.getDefaultInstance()) return this;
 if (other.hasEncodedRegionName()) {
  setEncodedRegionName(other.getEncodedRegionName());
 if (other.hasTableName()) {
  setTableName(other.getTableName());
 if (other.hasLogSequenceNumber()) {
  setLogSequenceNumber(other.getLogSequenceNumber());
 if (other.hasWriteTime()) {
  setWriteTime(other.getWriteTime());
 if (other.hasClusterId()) {
  mergeClusterId(other.getClusterId());
 if (other.hasFollowingKvCount()) {
  setFollowingKvCount(other.getFollowingKvCount());
 if (other.hasNonceGroup()) {
  setNonceGroup(other.getNonceGroup());
 if (other.hasNonce()) {
  setNonce(other.getNonce());
 if (other.hasOrigSequenceNumber()) {
  setOrigSequenceNumber(other.getOrigSequenceNumber());
 this.mergeUnknownFields(other.getUnknownFields());
origin: harbby/presto-connectors

for (WALEntry entry : entries) {
 TableName table =
   TableName.valueOf(entry.getKey().getTableName().toByteArray());
 Cell previousCell = null;
 Mutation m = null;
    new Put(cell.getRowArray(), cell.getRowOffset(), cell.getRowLength());
   List<UUID> clusterIds = new ArrayList<UUID>();
   for(HBaseProtos.UUID clusterId : entry.getKey().getClusterIdsList()){
    clusterIds.add(toUUID(clusterId));
this.metrics.setAgeOfLastAppliedOp(entries.get(size - 1).getKey().getWriteTime());
this.metrics.applyBatch(size);
this.totalReplicatedEdits.addAndGet(totalReplicated);
origin: com.aliyun.hbase/alihbase-protocol

public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
 org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
 int from_bitField0_ = bitField0_;
 int to_bitField0_ = 0;
origin: org.apache.hbase/hbase-protocol

public org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey buildPartial() {
 org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey result = new org.apache.hadoop.hbase.protobuf.generated.WALProtos.WALKey(this);
 int from_bitField0_ = bitField0_;
 int to_bitField0_ = 0;
origin: harbby/presto-connectors

result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
if (hasEncodedRegionName()) {
 result = result && getEncodedRegionName()
   .equals(other.getEncodedRegionName());
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
 result = result && getTableName()
   .equals(other.getTableName());
result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
if (hasLogSequenceNumber()) {
 result = result && (getLogSequenceNumber()
   == other.getLogSequenceNumber());
result = result && (hasWriteTime() == other.hasWriteTime());
if (hasWriteTime()) {
 result = result && (getWriteTime()
   == other.getWriteTime());
result = result && (hasClusterId() == other.hasClusterId());
if (hasClusterId()) {
 result = result && getClusterId()
   .equals(other.getClusterId());
result = result && getScopesList()
  .equals(other.getScopesList());
result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
if (hasFollowingKvCount()) {
origin: com.aliyun.hbase/alihbase-protocol

result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
if (hasEncodedRegionName()) {
 result = result && getEncodedRegionName()
   .equals(other.getEncodedRegionName());
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
 result = result && getTableName()
   .equals(other.getTableName());
result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
if (hasLogSequenceNumber()) {
 result = result && (getLogSequenceNumber()
   == other.getLogSequenceNumber());
result = result && (hasWriteTime() == other.hasWriteTime());
if (hasWriteTime()) {
 result = result && (getWriteTime()
   == other.getWriteTime());
result = result && (hasClusterId() == other.hasClusterId());
if (hasClusterId()) {
 result = result && getClusterId()
   .equals(other.getClusterId());
result = result && getScopesList()
  .equals(other.getScopesList());
result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
if (hasFollowingKvCount()) {
origin: org.apache.hbase/hbase-protocol

result = result && (hasEncodedRegionName() == other.hasEncodedRegionName());
if (hasEncodedRegionName()) {
 result = result && getEncodedRegionName()
   .equals(other.getEncodedRegionName());
result = result && (hasTableName() == other.hasTableName());
if (hasTableName()) {
 result = result && getTableName()
   .equals(other.getTableName());
result = result && (hasLogSequenceNumber() == other.hasLogSequenceNumber());
if (hasLogSequenceNumber()) {
 result = result && (getLogSequenceNumber()
   == other.getLogSequenceNumber());
result = result && (hasWriteTime() == other.hasWriteTime());
if (hasWriteTime()) {
 result = result && (getWriteTime()
   == other.getWriteTime());
result = result && (hasClusterId() == other.hasClusterId());
if (hasClusterId()) {
 result = result && getClusterId()
   .equals(other.getClusterId());
result = result && getScopesList()
  .equals(other.getScopesList());
result = result && (hasFollowingKvCount() == other.hasFollowingKvCount());
if (hasFollowingKvCount()) {
origin: NGDATA/hbase-indexer

TableName tableName = (entry.getKey().getWriteTime() < subscriptionTimestamp) ? null :
    TableName.valueOf(entry.getKey().getTableName().toByteArray());
Multimap<ByteBuffer, Cell> keyValuesPerRowKey = ArrayListMultimap.create();
final Map<ByteBuffer, byte[]> payloadPerRowKey = Maps.newHashMap();
      payloadPerRowKey.get(rowKeyBuffer));
  eventExecutor.scheduleSepEvent(sepEvent);
  lastProcessedTimestamp = Math.max(lastProcessedTimestamp, entry.getKey().getWriteTime());
origin: com.ngdata/hbase-sep-impl

TableName tableName = (entry.getKey().getWriteTime() < subscriptionTimestamp) ? null :
           TableName.valueOf(entry.getKey().getTableName().toByteArray());
Multimap<ByteBuffer, KeyValue> keyValuesPerRowKey = ArrayListMultimap.create();
final Map<ByteBuffer, byte[]> payloadPerRowKey = Maps.newHashMap();
      payloadPerRowKey.get(rowKeyBuffer));
  eventExecutor.scheduleSepEvent(sepEvent);
  lastProcessedTimestamp = Math.max(lastProcessedTimestamp, entry.getKey().getWriteTime());
origin: harbby/presto-connectors

hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasEncodedRegionName()) {
 hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
 hash = (53 * hash) + getEncodedRegionName().hashCode();
if (hasTableName()) {
 hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
 hash = (53 * hash) + getTableName().hashCode();
if (hasLogSequenceNumber()) {
 hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
 hash = (53 * hash) + hashLong(getLogSequenceNumber());
if (hasWriteTime()) {
 hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
 hash = (53 * hash) + hashLong(getWriteTime());
if (hasClusterId()) {
 hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
 hash = (53 * hash) + getClusterId().hashCode();
if (getScopesCount() > 0) {
 hash = (37 * hash) + SCOPES_FIELD_NUMBER;
 hash = (53 * hash) + getScopesList().hashCode();
if (hasFollowingKvCount()) {
 hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
 hash = (53 * hash) + getFollowingKvCount();
origin: com.aliyun.hbase/alihbase-protocol

hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasEncodedRegionName()) {
 hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
 hash = (53 * hash) + getEncodedRegionName().hashCode();
if (hasTableName()) {
 hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
 hash = (53 * hash) + getTableName().hashCode();
if (hasLogSequenceNumber()) {
 hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
 hash = (53 * hash) + hashLong(getLogSequenceNumber());
if (hasWriteTime()) {
 hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
 hash = (53 * hash) + hashLong(getWriteTime());
if (hasClusterId()) {
 hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
 hash = (53 * hash) + getClusterId().hashCode();
if (getScopesCount() > 0) {
 hash = (37 * hash) + SCOPES_FIELD_NUMBER;
 hash = (53 * hash) + getScopesList().hashCode();
if (hasFollowingKvCount()) {
 hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
 hash = (53 * hash) + getFollowingKvCount();
origin: org.apache.hbase/hbase-protocol

hash = (19 * hash) + getDescriptorForType().hashCode();
if (hasEncodedRegionName()) {
 hash = (37 * hash) + ENCODED_REGION_NAME_FIELD_NUMBER;
 hash = (53 * hash) + getEncodedRegionName().hashCode();
if (hasTableName()) {
 hash = (37 * hash) + TABLE_NAME_FIELD_NUMBER;
 hash = (53 * hash) + getTableName().hashCode();
if (hasLogSequenceNumber()) {
 hash = (37 * hash) + LOG_SEQUENCE_NUMBER_FIELD_NUMBER;
 hash = (53 * hash) + hashLong(getLogSequenceNumber());
if (hasWriteTime()) {
 hash = (37 * hash) + WRITE_TIME_FIELD_NUMBER;
 hash = (53 * hash) + hashLong(getWriteTime());
if (hasClusterId()) {
 hash = (37 * hash) + CLUSTER_ID_FIELD_NUMBER;
 hash = (53 * hash) + getClusterId().hashCode();
if (getScopesCount() > 0) {
 hash = (37 * hash) + SCOPES_FIELD_NUMBER;
 hash = (53 * hash) + getScopesList().hashCode();
if (hasFollowingKvCount()) {
 hash = (37 * hash) + FOLLOWING_KV_COUNT_FIELD_NUMBER;
 hash = (53 * hash) + getFollowingKvCount();
org.apache.hadoop.hbase.protobuf.generated.WALProtos$WALKey

Javadoc

Protobuf type hbase.pb.WALKey
 
Protocol buffer version of WALKey; see WALKey comment, not really a key but WALEdit header 
for some KVs 

Most used methods

  • getTableName
    required bytes table_name = 2;
  • getWriteTime
    required uint64 write_time = 4;
  • getEncodedRegionName
    required bytes encoded_region_name = 1;
  • <init>
  • equals
  • getClusterId
    optional .hbase.pb.UUID cluster_id = 5 [deprecated = true]; This parameter is deprecated in favor of cluster_ids.
  • getClusterIds
    repeated .hbase.pb.UUID cluster_ids = 8; This field contains the list of clusters that have consumed the change.
  • getClusterIdsCount
    repeated .hbase.pb.UUID cluster_ids = 8; This field contains the list of clusters that have consumed the change.
  • getClusterIdsList
    repeated .hbase.pb.UUID cluster_ids = 8; This field contains the list of clusters that have consumed the change.
  • getDefaultInstance
  • getDescriptorForType
  • getFollowingKvCount
    optional uint32 following_kv_count = 7;
  • getDescriptorForType,
  • getFollowingKvCount,
  • getLogSequenceNumber,
  • getNonce,
  • getNonceGroup,
  • getOrigSequenceNumber,
  • getScopes,
  • getScopesCount,
  • getScopesList,
  • getSerializedSize

Popular in Java

  • Finding current android device location
  • addToBackStack (FragmentTransaction)
  • scheduleAtFixedRate (Timer)
  • getExternalFilesDir (Context)
  • FileOutputStream (java.io)
    An output stream that writes bytes to a file. If the output file exists, it can be replaced or appen
  • FileReader (java.io)
    A specialized Reader that reads from a file in the file system. All read requests made by calling me
  • Thread (java.lang)
    A thread is a thread of execution in a program. The Java Virtual Machine allows an application to ha
  • ArrayList (java.util)
    ArrayList is an implementation of List, backed by an array. All optional operations including adding
  • Executors (java.util.concurrent)
    Factory and utility methods for Executor, ExecutorService, ScheduledExecutorService, ThreadFactory,
  • Cipher (javax.crypto)
    This class provides access to implementations of cryptographic ciphers for encryption and decryption
  • Top 12 Jupyter Notebook extensions
Tabnine Logo
  • Products

    Search for Java codeSearch for JavaScript code
  • IDE Plugins

    IntelliJ IDEAWebStormVisual StudioAndroid StudioEclipseVisual Studio CodePyCharmSublime TextPhpStormVimGoLandRubyMineEmacsJupyter NotebookJupyter LabRiderDataGripAppCode
  • Company

    About UsContact UsCareers
  • Resources

    FAQBlogTabnine AcademyTerms of usePrivacy policyJava Code IndexJavascript Code Index
Get Tabnine for your IDE now