/**
 * <code>required .hadoop.hdfs.BlockProto key = 1;</code>
 */
public Builder setKey(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
  if (keyBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    key_ = value;
    onChanged();
  } else {
    keyBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}

/**
 * <code>required .hadoop.hdfs.BlockProto key = 1;</code>
 */
public org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder getKeyBuilder() {
  bitField0_ |= 0x00000001;
  onChanged();
  return getKeyFieldBuilder().getBuilder();
}

/**
 * <code>required .hadoop.hdfs.BlockProto key = 1;</code>
 */
public Builder clearKey() {
  if (keyBuilder_ == null) {
    key_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance();
    onChanged();
  } else {
    keyBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  return this;
}

/**
 * <code>required .hadoop.hdfs.BlockProto key = 1;</code>
 */
public Builder mergeKey(org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto value) {
  if (keyBuilder_ == null) {
    if (((bitField0_ & 0x00000001) == 0x00000001) &&
        key_ != org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.getDefaultInstance()) {
      key_ = org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.newBuilder(key_)
          .mergeFrom(value).buildPartial();
    } else {
      key_ = value;
    }
    onChanged();
  } else {
    keyBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000001;
  return this;
}

/**
 * <code>required .hadoop.hdfs.BlockProto key = 1;</code>
 */
public Builder setKey(
    org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto.Builder builderForValue) {
  if (keyBuilder_ == null) {
    key_ = builderForValue.build();
    onChanged();
  } else {
    keyBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  return this;
}
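// Usage sketch for the generated accessors above, kept as a comment so the
// generated file still compiles. Assumptions: "builder" stands for a builder
// of the (unnamed here) enclosing message; BlockProto's setBlockId/setGenStamp/
// setNumBytes setters come from the hadoop.hdfs.BlockProto definition.
//
//   HdfsProtos.BlockProto block = HdfsProtos.BlockProto.newBuilder()
//       .setBlockId(1000L)
//       .setGenStamp(1001L)
//       .setNumBytes(0L)
//       .build();
//
//   // Hand a finished message to setKey(...),
//   builder.setKey(block);
//
//   // ...or edit the nested field in place via getKeyBuilder():
//   builder.getKeyBuilder().setNumBytes(134217728L);
//
// mergeKey(...) instead field-merges the argument into any value already set,
// and clearKey() resets the field and clears its has-bit in bitField0_.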