public org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto buildPartial() {
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto result =
      new org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto(this);
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
    to_bitField0_ |= 0x00000001;
  }
  if (keysBuilder_ == null) {
    result.keys_ = keys_;
  } else {
    result.keys_ = keysBuilder_.build();
  }
  result.bitField0_ = to_bitField0_;
  onBuilt();
  return result;
}
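// Usage sketch, not part of the generated file (assumes an import of
// org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos):
// buildPartial() skips the isInitialized() check that build() performs,
// so it can hand back a message whose required 'keys' field is unset.
private static DatanodeProtocolProtos.KeyUpdateCommandProto partialKeyUpdateCmd() {
  // build() here would throw UninitializedMessageException because the
  // required 'keys' field is never set; buildPartial() returns it anyway.
  return DatanodeProtocolProtos.KeyUpdateCommandProto.newBuilder().buildPartial();
}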
// Reconstructed from a truncated fragment: the standard generated
// clearKeyUpdateCmd() pattern, with the 0x00000020 has-bit matching
// mergeKeyUpdateCmd() below.
public Builder clearKeyUpdateCmd() {
  if (keyUpdateCmdBuilder_ == null) {
    keyUpdateCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto.getDefaultInstance();
    onChanged();
  } else {
    keyUpdateCmdBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000020);
  return this;
}
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptorForType().hashCode();
  if (hasKeys()) {
    hash = (37 * hash) + KEYS_FIELD_NUMBER;
    hash = (53 * hash) + getKeys().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// This constructor ships twice: in Hadoop's own jars under
// com.google.protobuf, and in the Presto shaded Hadoop jar under
// io.prestosql.hadoop.$internal.com.google.protobuf; the copies differ
// only in that relocated package prefix. Reconstructed from a truncated
// fragment; the 'case 10' branch for the required 'keys' field follows
// the standard protobuf 2.5 generated pattern and is an inference.
private KeyUpdateCommandProto(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  initFields();
  int mutable_bitField0_ = 0;
  com.google.protobuf.UnknownFieldSet.Builder unknownFields =
      com.google.protobuf.UnknownFieldSet.newBuilder();
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        default: {
          if (!parseUnknownField(input, unknownFields,
                                 extensionRegistry, tag)) {
            done = true;
          }
          break;
        }
        case 10: {
          org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto.Builder subBuilder = null;
          if (((bitField0_ & 0x00000001) == 0x00000001)) {
            subBuilder = keys_.toBuilder();
          }
          keys_ = input.readMessage(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExportedBlockKeysProto.PARSER, extensionRegistry);
          if (subBuilder != null) {
            subBuilder.mergeFrom(keys_);
            keys_ = subBuilder.buildPartial();
          }
          bitField0_ |= 0x00000001;
          break;
        }
      }
    }
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.setUnfinishedMessage(this);
  } catch (java.io.IOException e) {
    throw new com.google.protobuf.InvalidProtocolBufferException(
        e.getMessage()).setUnfinishedMessage(this);
  } finally {
    this.unknownFields = unknownFields.build();
    makeExtensionsImmutable();
  }
}
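// Usage sketch (assumes the standard protobuf 2.5 generated API): parseFrom()
// drives the constructor above through the message's PARSER; tags the schema
// does not recognize are kept in the UnknownFieldSet rather than dropped.
private static DatanodeProtocolProtos.KeyUpdateCommandProto parseKeyUpdateCmd(byte[] wire)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return DatanodeProtocolProtos.KeyUpdateCommandProto.parseFrom(wire);
}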
result = result && (hasKeyUpdateCmd() == other.hasKeyUpdateCmd());
if (hasKeyUpdateCmd()) {
  result = result && getKeyUpdateCmd()
      .equals(other.getKeyUpdateCmd());
}
if (hasKeyUpdateCmd()) {
  hash = (37 * hash) + KEYUPDATECMD_FIELD_NUMBER;
  hash = (53 * hash) + getKeyUpdateCmd().hashCode();
}
/**
 * <code>optional .hadoop.hdfs.datanode.KeyUpdateCommandProto keyUpdateCmd = 6;</code>
 */
public Builder mergeKeyUpdateCmd(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto value) {
  if (keyUpdateCmdBuilder_ == null) {
    if (((bitField0_ & 0x00000020) == 0x00000020) &&
        keyUpdateCmd_ != org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto.getDefaultInstance()) {
      keyUpdateCmd_ =
          org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto.newBuilder(keyUpdateCmd_).mergeFrom(value).buildPartial();
    } else {
      keyUpdateCmd_ = value;
    }
    onChanged();
  } else {
    keyUpdateCmdBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000020;
  return this;
}
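// Sketch of the merge semantics above; DatanodeCommandProto as the owning
// message is an inference from the 'keyUpdateCmd = 6' javadoc, not stated
// in this file. Merging into an already-set field combines the two
// messages field-by-field instead of overwriting.
private static DatanodeProtocolProtos.DatanodeCommandProto.Builder mergeTwice(
    DatanodeProtocolProtos.DatanodeCommandProto.Builder builder,
    DatanodeProtocolProtos.KeyUpdateCommandProto first,
    DatanodeProtocolProtos.KeyUpdateCommandProto second) {
  builder.mergeKeyUpdateCmd(first);  // field was unset: 'first' is adopted as-is
  builder.mergeKeyUpdateCmd(second); // field now set: 'second' merges into 'first'
  return builder;
}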
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto other =
      (org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto) obj;

  boolean result = true;
  result = result && (hasKeys() == other.hasKeys());
  if (hasKeys()) {
    result = result && getKeys()
        .equals(other.getKeys());
  }
  result = result && getUnknownFields().equals(other.getUnknownFields());
  return result;
}
public static KeyUpdateCommand convert(KeyUpdateCommandProto keyUpdateCmd) {
  return new KeyUpdateCommand(PBHelper.convert(keyUpdateCmd.getKeys()));
}
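// Usage sketch (hypothetical caller, same class as convert() above):
// unwrap the proto from its DatanodeCommandProto carrier before converting;
// the carrier type is inferred from the 'keyUpdateCmd = 6' field above.
public static KeyUpdateCommand convertIfKeyUpdate(
    DatanodeProtocolProtos.DatanodeCommandProto proto) {
  return proto.hasKeyUpdateCmd() ? convert(proto.getKeyUpdateCmd()) : null;
}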
public org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto getDefaultInstanceForType() {
  return org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.KeyUpdateCommandProto.getDefaultInstance();
}