/**
 * <code>optional .hadoop.hdfs.datanode.BlockECReconstructionCommandProto blkECReconstructionCmd = 9;</code>
 *
 * Merges {@code value} into this builder's blkECReconstructionCmd field and
 * marks the field as set (bit 0x00000100 in {@code bitField0_}).
 * Generated protobuf builder code — do not hand-edit; regenerate from the .proto.
 *
 * @param value the message to merge into the current field value
 * @return this builder, for call chaining
 */
public Builder mergeBlkECReconstructionCmd(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto value) {
  if (blkECReconstructionCmdBuilder_ == null) {
    // No nested field builder is active: operate on the raw message field.
    if (((bitField0_ & 0x00000100) == 0x00000100) &&
        blkECReconstructionCmd_ != org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.getDefaultInstance()) {
      // Field already holds a non-default message: apply protobuf merge
      // semantics by folding `value` into the existing message.
      blkECReconstructionCmd_ = org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto.newBuilder(blkECReconstructionCmd_).mergeFrom(value).buildPartial();
    } else {
      // Field unset (or default instance): adopt `value` wholesale.
      blkECReconstructionCmd_ = value;
    }
    onChanged();
  } else {
    // A nested builder owns the field: delegate the merge to it.
    blkECReconstructionCmdBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000100;
  return this;
}
/**
/**
 * Returns a new {@code Builder} seeded with this message's current field
 * values (generated protobuf plumbing; delegates to {@code newBuilder(this)}).
 */
public Builder toBuilder() {
  Builder seeded = newBuilder(this);
  return seeded;
}
/** Returns a fresh, empty {@code Builder} for this message type (generated protobuf plumbing). */
public Builder newBuilderForType() { return newBuilder(); }
// NOTE(review): the declaration below is truncated in this chunk — its body
// continues outside the visible range, so it is left exactly as found.
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto prototype) {
/**
 * Creates a {@code Builder} pre-populated with the fields of {@code prototype}.
 *
 * @param prototype the message whose fields seed the new builder
 * @return a new builder carrying {@code prototype}'s field values
 */
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.BlockECReconstructionCommandProto prototype) {
  Builder fresh = newBuilder();
  fresh.mergeFrom(prototype);
  return fresh;
}

/** Returns a {@code Builder} initialized from this message's current fields. */
public Builder toBuilder() {
  return newBuilder(this);
}
/**
 * Converts a {@link BlockECReconstructionCommand} into its protobuf wire form.
 *
 * Each EC reconstruction task from {@code getECTasks()} is translated via
 * {@code convertBlockECRecoveryInfo} and appended to the proto builder.
 *
 * @param blkECReconstructionCmd the command to convert
 * @return the populated {@code BlockECReconstructionCommandProto}
 */
public static BlockECReconstructionCommandProto convert(
    BlockECReconstructionCommand blkECReconstructionCmd) {
  BlockECReconstructionCommandProto.Builder protoBuilder =
      BlockECReconstructionCommandProto.newBuilder();
  // Iterate the tasks directly; no intermediate collection variable needed.
  for (BlockECReconstructionInfo taskInfo : blkECReconstructionCmd.getECTasks()) {
    protoBuilder.addBlockECReconstructioninfo(convertBlockECRecoveryInfo(taskInfo));
  }
  return protoBuilder.build();
}