/**
 * Writes a SUCCESS {@code BlockOpResponseProto} to the client that carries the
 * block's checksum descriptor and the chunk offset the sender will begin
 * reading from, then flushes the stream so the client sees it immediately.
 *
 * @param blockSender supplies the checksum type and starting offset
 * @param out         stream to the remote peer; flushed before returning
 * @throws IOException if serializing or writing the response fails
 */
private void writeSuccessWithChecksumInfo(BlockSender blockSender,
    DataOutputStream out) throws IOException {
  final ReadOpChecksumInfoProto checksumInfo = ReadOpChecksumInfoProto.newBuilder()
      .setChecksum(DataTransferProtoUtil.toProto(blockSender.getChecksum()))
      .setChunkOffset(blockSender.getOffset())
      .build();
  final BlockOpResponseProto reply = BlockOpResponseProto.newBuilder()
      .setStatus(SUCCESS)
      .setReadOpChecksumInfo(checksumInfo)
      .build();
  reply.writeDelimitedTo(out);
  out.flush();
}
/**
 * Merges {@code value} into the {@code readOpChecksumInfo} field
 * (protoc-generated builder logic — NOTE(review): do not hand-edit;
 * regenerate from the .proto instead).
 *
 * <code>optional .hadoop.hdfs.ReadOpChecksumInfoProto readOpChecksumInfo = 4;</code>
 */
public Builder mergeReadOpChecksumInfo(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto value) {
  if (readOpChecksumInfoBuilder_ == null) {
    // No nested builder in use: merge directly into the stored message.
    // If the field is already present (bit 0x08) and not the default
    // instance, combine the existing message with the incoming one
    // field-by-field; otherwise adopt the incoming message wholesale.
    if (((bitField0_ & 0x00000008) == 0x00000008) && readOpChecksumInfo_ != org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.getDefaultInstance()) {
      readOpChecksumInfo_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.newBuilder(readOpChecksumInfo_).mergeFrom(value).buildPartial();
    } else {
      readOpChecksumInfo_ = value;
    }
    onChanged();
  } else {
    // A nested builder exists; delegate the merge to it.
    readOpChecksumInfoBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000008; // mark the field as present
  return this;
}
/**
/**
 * Merges {@code value} into the {@code readOpChecksumInfo} field
 * (protoc-generated builder logic — NOTE(review): do not hand-edit;
 * regenerate from the .proto instead).
 *
 * <code>optional .hadoop.hdfs.ReadOpChecksumInfoProto readOpChecksumInfo = 4;</code>
 */
public Builder mergeReadOpChecksumInfo(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto value) {
  if (readOpChecksumInfoBuilder_ == null) {
    // No nested builder in use: merge directly into the stored message.
    // If the field is already present (bit 0x08) and not the default
    // instance, combine the existing message with the incoming one
    // field-by-field; otherwise adopt the incoming message wholesale.
    if (((bitField0_ & 0x00000008) == 0x00000008) && readOpChecksumInfo_ != org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.getDefaultInstance()) {
      readOpChecksumInfo_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.newBuilder(readOpChecksumInfo_).mergeFrom(value).buildPartial();
    } else {
      readOpChecksumInfo_ = value;
    }
    onChanged();
  } else {
    // A nested builder exists; delegate the merge to it.
    readOpChecksumInfoBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000008; // mark the field as present
  return this;
}
/**
// Attach checksum info using DEFAULT_CHECKSUM, reading from chunk offset 0.
// NOTE(review): expression fragment — part of a larger builder chain outside this chunk.
.setReadOpChecksumInfo(ReadOpChecksumInfoProto.newBuilder()
    .setChecksum(DataTransferProtoUtil.toProto(DEFAULT_CHECKSUM))
    .setChunkOffset(0L))
/** Returns a builder pre-populated with this message's current field values. */
public Builder toBuilder() {
  final Builder populated = newBuilder(this);
  return populated;
}
/**
 * Merges {@code value} into the {@code readOpChecksumInfo} field
 * (protoc-generated builder logic — NOTE(review): do not hand-edit;
 * regenerate from the .proto instead).
 *
 * <code>optional .hadoop.hdfs.ReadOpChecksumInfoProto readOpChecksumInfo = 4;</code>
 */
public Builder mergeReadOpChecksumInfo(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto value) {
  if (readOpChecksumInfoBuilder_ == null) {
    // No nested builder in use: merge directly into the stored message.
    // If the field is already present (bit 0x08) and not the default
    // instance, combine the existing message with the incoming one
    // field-by-field; otherwise adopt the incoming message wholesale.
    if (((bitField0_ & 0x00000008) == 0x00000008) && readOpChecksumInfo_ != org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.getDefaultInstance()) {
      readOpChecksumInfo_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto.newBuilder(readOpChecksumInfo_).mergeFrom(value).buildPartial();
    } else {
      readOpChecksumInfo_ = value;
    }
    onChanged();
  } else {
    // A nested builder exists; delegate the merge to it.
    readOpChecksumInfoBuilder_.mergeFrom(value);
  }
  bitField0_ |= 0x00000008; // mark the field as present
  return this;
}
/**
/** Returns a fresh, empty builder for this message type. */
public Builder newBuilderForType() { return newBuilder(); }
/**
 * Creates a builder pre-populated from {@code prototype}.
 * NOTE(review): the method body continues beyond this chunk — left unmodified.
 */
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto prototype) {
/** Returns a fresh, empty builder for this message type. */
public Builder newBuilderForType() { return newBuilder(); }
/**
 * Creates a builder pre-populated from {@code prototype}.
 * NOTE(review): the method body continues beyond this chunk — left unmodified.
 */
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto prototype) {
/** Returns a builder pre-populated with this message's current field values. */
public Builder toBuilder() {
  final Builder populated = newBuilder(this);
  return populated;
}
/**
 * Writes a SUCCESS {@code BlockOpResponseProto} to the client that carries the
 * block's checksum descriptor and the chunk offset the sender will begin
 * reading from, then flushes the stream so the client sees it immediately.
 *
 * @param blockSender supplies the checksum type and starting offset
 * @param out         stream to the remote peer; flushed before returning
 * @throws IOException if serializing or writing the response fails
 */
private void writeSuccessWithChecksumInfo(BlockSender blockSender,
    DataOutputStream out) throws IOException {
  final ReadOpChecksumInfoProto checksumInfo = ReadOpChecksumInfoProto.newBuilder()
      .setChecksum(DataTransferProtoUtil.toProto(blockSender.getChecksum()))
      .setChunkOffset(blockSender.getOffset())
      .build();
  final BlockOpResponseProto reply = BlockOpResponseProto.newBuilder()
      .setStatus(SUCCESS)
      .setReadOpChecksumInfo(checksumInfo)
      .build();
  reply.writeDelimitedTo(out);
  out.flush();
}
/** Creates a new builder seeded with the contents of {@code prototype}. */
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto prototype) {
  final Builder seeded = newBuilder().mergeFrom(prototype);
  return seeded;
}

/** Returns a builder pre-populated with this message's current field values. */
public Builder toBuilder() {
  return newBuilder(this);
}
/**
 * Writes a SUCCESS {@code BlockOpResponseProto} to the client that carries the
 * block's checksum descriptor and the chunk offset the sender will begin
 * reading from, then flushes the stream so the client sees it immediately.
 *
 * @param blockSender supplies the checksum type and starting offset
 * @param out         stream to the remote peer; flushed before returning
 * @throws IOException if serializing or writing the response fails
 */
private void writeSuccessWithChecksumInfo(BlockSender blockSender,
    DataOutputStream out) throws IOException {
  final ReadOpChecksumInfoProto checksumInfo = ReadOpChecksumInfoProto.newBuilder()
      .setChecksum(DataTransferProtoUtil.toProto(blockSender.getChecksum()))
      .setChunkOffset(blockSender.getOffset())
      .build();
  final BlockOpResponseProto reply = BlockOpResponseProto.newBuilder()
      .setStatus(SUCCESS)
      .setReadOpChecksumInfo(checksumInfo)
      .build();
  reply.writeDelimitedTo(out);
  out.flush();
}
/** Creates a new builder seeded with the contents of {@code prototype}. */
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto prototype) {
  final Builder seeded = newBuilder().mergeFrom(prototype);
  return seeded;
}

/** Returns a builder pre-populated with this message's current field values. */
public Builder toBuilder() {
  return newBuilder(this);
}
/** Returns a fresh, empty builder for this message type. */
public Builder newBuilderForType() { return newBuilder(); }
/**
 * Creates a builder pre-populated from {@code prototype}.
 * NOTE(review): the method body continues beyond this chunk — left unmodified.
 */
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto prototype) {
/** Returns a builder pre-populated with this message's current field values. */
public Builder toBuilder() {
  final Builder populated = newBuilder(this);
  return populated;
}
/** Creates a new builder seeded with the contents of {@code prototype}. */
public static Builder newBuilder(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ReadOpChecksumInfoProto prototype) {
  final Builder seeded = newBuilder().mergeFrom(prototype);
  return seeded;
}

/** Returns a builder pre-populated with this message's current field values. */
public Builder toBuilder() {
  return newBuilder(this);
}