// ============================================================================
// NOTE(review): every line in this span is a truncated fragment of
// protobuf-generated code for OpWriteBlockProto (builder merge, wire
// serialization, equals/hashCode, initialization checks). Each fragment is
// cut off mid-body — the brace counts do not balance — so the code is left
// byte-identical and only annotated. Several fragments are exact duplicates
// of one another.
// ============================================================================

// Builder.mergeFrom(OpWriteBlockProto): short-circuits on the default
// instance, then copies each field that is set on `other` into this builder
// (merge* for message fields, set* for scalars/enums). Truncated right after
// the hasStorageType() check.
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.getDefaultInstance()) return this; if (other.hasHeader()) { mergeHeader(other.getHeader()); if (other.hasSource()) { mergeSource(other.getSource()); if (other.hasStage()) { setStage(other.getStage()); if (other.hasPipelineSize()) { setPipelineSize(other.getPipelineSize()); if (other.hasMinBytesRcvd()) { setMinBytesRcvd(other.getMinBytesRcvd()); if (other.hasMaxBytesRcvd()) { setMaxBytesRcvd(other.getMaxBytesRcvd()); if (other.hasLatestGenerationStamp()) { setLatestGenerationStamp(other.getLatestGenerationStamp()); if (other.hasRequestedChecksum()) { mergeRequestedChecksum(other.getRequestedChecksum()); if (other.hasCachingStrategy()) { mergeCachingStrategy(other.getCachingStrategy()); if (other.hasStorageType()) {
// writeTo(CodedOutputStream): writes field 1 (header) when bit 0 of
// bitField0_ is set, then — the fragment jumps ahead — field 16 (storageId
// bytes) and the unknown-field set. The writes for the intervening fields
// are missing from this truncated copy.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, header_); output.writeBytes(16, getStorageIdBytes()); getUnknownFields().writeTo(output);
// Exact duplicate of the Builder.mergeFrom fragment above.
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.getDefaultInstance()) return this; if (other.hasHeader()) { mergeHeader(other.getHeader()); if (other.hasSource()) { mergeSource(other.getSource()); if (other.hasStage()) { setStage(other.getStage()); if (other.hasPipelineSize()) { setPipelineSize(other.getPipelineSize()); if (other.hasMinBytesRcvd()) { setMinBytesRcvd(other.getMinBytesRcvd()); if (other.hasMaxBytesRcvd()) { setMaxBytesRcvd(other.getMaxBytesRcvd()); if (other.hasLatestGenerationStamp()) { setLatestGenerationStamp(other.getLatestGenerationStamp()); if (other.hasRequestedChecksum()) { mergeRequestedChecksum(other.getRequestedChecksum()); if (other.hasCachingStrategy()) { mergeCachingStrategy(other.getCachingStrategy()); if (other.hasStorageType()) {
// equals(): field-by-field comparison fragment — same has*() presence flag
// and same value for header, targets list, source, stage, pipelineSize and
// minBytesRcvd; truncated inside the maxBytesRcvd comparison.
result = result && (hasHeader() == other.hasHeader()); if (hasHeader()) { result = result && getHeader() .equals(other.getHeader()); result = result && getTargetsList() .equals(other.getTargetsList()); result = result && (hasSource() == other.hasSource()); if (hasSource()) { result = result && getSource() .equals(other.getSource()); result = result && (hasStage() == other.hasStage()); if (hasStage()) { result = result && (getStage() == other.getStage()); result = result && (hasPipelineSize() == other.hasPipelineSize()); if (hasPipelineSize()) { result = result && (getPipelineSize() == other.getPipelineSize()); result = result && (hasMinBytesRcvd() == other.hasMinBytesRcvd()); if (hasMinBytesRcvd()) { result = result && (getMinBytesRcvd() == other.getMinBytesRcvd()); result = result && (hasMaxBytesRcvd() == other.hasMaxBytesRcvd()); if (hasMaxBytesRcvd()) {
// Exact duplicate of the equals() fragment above.
result = result && (hasHeader() == other.hasHeader()); if (hasHeader()) { result = result && getHeader() .equals(other.getHeader()); result = result && getTargetsList() .equals(other.getTargetsList()); result = result && (hasSource() == other.hasSource()); if (hasSource()) { result = result && getSource() .equals(other.getSource()); result = result && (hasStage() == other.hasStage()); if (hasStage()) { result = result && (getStage() == other.getStage()); result = result && (hasPipelineSize() == other.hasPipelineSize()); if (hasPipelineSize()) { result = result && (getPipelineSize() == other.getPipelineSize()); result = result && (hasMinBytesRcvd() == other.hasMinBytesRcvd()); if (hasMinBytesRcvd()) { result = result && (getMinBytesRcvd() == other.getMinBytesRcvd()); result = result && (hasMaxBytesRcvd() == other.hasMaxBytesRcvd()); if (hasMaxBytesRcvd()) {
// Exact duplicate of the equals() fragment above.
result = result && (hasHeader() == other.hasHeader()); if (hasHeader()) { result = result && getHeader() .equals(other.getHeader()); result = result && getTargetsList() .equals(other.getTargetsList()); result = result && (hasSource() == other.hasSource()); if (hasSource()) { result = result && getSource() .equals(other.getSource()); result = result && (hasStage() == other.hasStage()); if (hasStage()) { result = result && (getStage() == other.getStage()); result = result && (hasPipelineSize() == other.hasPipelineSize()); if (hasPipelineSize()) { result = result && (getPipelineSize() == other.getPipelineSize()); result = result && (hasMinBytesRcvd() == other.hasMinBytesRcvd()); if (hasMinBytesRcvd()) { result = result && (getMinBytesRcvd() == other.getMinBytesRcvd()); result = result && (hasMaxBytesRcvd() == other.hasMaxBytesRcvd()); if (hasMaxBytesRcvd()) {
// hashCode(): standard generated 19/37/53 mixing over the descriptor and
// each present field (header, targets, source, stage, pipelineSize,
// minBytesRcvd, maxBytesRcvd); truncated before the remaining fields.
hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasHeader()) { hash = (37 * hash) + HEADER_FIELD_NUMBER; hash = (53 * hash) + getHeader().hashCode(); if (getTargetsCount() > 0) { hash = (37 * hash) + TARGETS_FIELD_NUMBER; hash = (53 * hash) + getTargetsList().hashCode(); if (hasSource()) { hash = (37 * hash) + SOURCE_FIELD_NUMBER; hash = (53 * hash) + getSource().hashCode(); if (hasStage()) { hash = (37 * hash) + STAGE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getStage()); if (hasPipelineSize()) { hash = (37 * hash) + PIPELINESIZE_FIELD_NUMBER; hash = (53 * hash) + getPipelineSize(); if (hasMinBytesRcvd()) { hash = (37 * hash) + MINBYTESRCVD_FIELD_NUMBER; hash = (53 * hash) + hashLong(getMinBytesRcvd()); if (hasMaxBytesRcvd()) { hash = (37 * hash) + MAXBYTESRCVD_FIELD_NUMBER; hash = (53 * hash) + hashLong(getMaxBytesRcvd());
// isInitialized(): fails fast (caches memoizedIsInitialized = 0) when any
// required field — header, stage, pipelineSize, minBytesRcvd, maxBytesRcvd,
// latestGenerationStamp, requestedChecksum — is unset; truncated inside the
// nested getHeader().isInitialized() check.
if (!hasHeader()) { memoizedIsInitialized = 0; return false; if (!hasStage()) { memoizedIsInitialized = 0; return false; if (!hasPipelineSize()) { memoizedIsInitialized = 0; return false; if (!hasMinBytesRcvd()) { memoizedIsInitialized = 0; return false; if (!hasMaxBytesRcvd()) { memoizedIsInitialized = 0; return false; if (!hasLatestGenerationStamp()) { memoizedIsInitialized = 0; return false; if (!hasRequestedChecksum()) { memoizedIsInitialized = 0; return false; if (!getHeader().isInitialized()) {
// Exact duplicate of the hashCode() fragment above.
hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasHeader()) { hash = (37 * hash) + HEADER_FIELD_NUMBER; hash = (53 * hash) + getHeader().hashCode(); if (getTargetsCount() > 0) { hash = (37 * hash) + TARGETS_FIELD_NUMBER; hash = (53 * hash) + getTargetsList().hashCode(); if (hasSource()) { hash = (37 * hash) + SOURCE_FIELD_NUMBER; hash = (53 * hash) + getSource().hashCode(); if (hasStage()) { hash = (37 * hash) + STAGE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getStage()); if (hasPipelineSize()) { hash = (37 * hash) + PIPELINESIZE_FIELD_NUMBER; hash = (53 * hash) + getPipelineSize(); if (hasMinBytesRcvd()) { hash = (37 * hash) + MINBYTESRCVD_FIELD_NUMBER; hash = (53 * hash) + hashLong(getMinBytesRcvd()); if (hasMaxBytesRcvd()) { hash = (37 * hash) + MAXBYTESRCVD_FIELD_NUMBER; hash = (53 * hash) + hashLong(getMaxBytesRcvd());
// Exact duplicate of the isInitialized() fragment above.
if (!hasHeader()) { memoizedIsInitialized = 0; return false; if (!hasStage()) { memoizedIsInitialized = 0; return false; if (!hasPipelineSize()) { memoizedIsInitialized = 0; return false; if (!hasMinBytesRcvd()) { memoizedIsInitialized = 0; return false; if (!hasMaxBytesRcvd()) { memoizedIsInitialized = 0; return false; if (!hasLatestGenerationStamp()) { memoizedIsInitialized = 0; return false; if (!hasRequestedChecksum()) { memoizedIsInitialized = 0; return false; if (!getHeader().isInitialized()) {
// Exact duplicate of the Builder.mergeFrom fragment above.
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpWriteBlockProto.getDefaultInstance()) return this; if (other.hasHeader()) { mergeHeader(other.getHeader()); if (other.hasSource()) { mergeSource(other.getSource()); if (other.hasStage()) { setStage(other.getStage()); if (other.hasPipelineSize()) { setPipelineSize(other.getPipelineSize()); if (other.hasMinBytesRcvd()) { setMinBytesRcvd(other.getMinBytesRcvd()); if (other.hasMaxBytesRcvd()) { setMaxBytesRcvd(other.getMaxBytesRcvd()); if (other.hasLatestGenerationStamp()) { setLatestGenerationStamp(other.getLatestGenerationStamp()); if (other.hasRequestedChecksum()) { mergeRequestedChecksum(other.getRequestedChecksum()); if (other.hasCachingStrategy()) { mergeCachingStrategy(other.getCachingStrategy()); if (other.hasStorageType()) {
// writeTo() variant from a shaded build (io.prestosql.hadoop.$internal
// relocation of com.google.protobuf): writes field 1 (header) and — after a
// truncation gap — field 15 (targetPinnings, presumably inside a loop over
// index i; the loop header is not visible here — TODO confirm).
public void writeTo(io.prestosql.hadoop.$internal.com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, header_); output.writeBool(15, targetPinnings_.get(i)); getUnknownFields().writeTo(output);
// Exact duplicate of the isInitialized() fragment above.
if (!hasHeader()) { memoizedIsInitialized = 0; return false; if (!hasStage()) { memoizedIsInitialized = 0; return false; if (!hasPipelineSize()) { memoizedIsInitialized = 0; return false; if (!hasMinBytesRcvd()) { memoizedIsInitialized = 0; return false; if (!hasMaxBytesRcvd()) { memoizedIsInitialized = 0; return false; if (!hasLatestGenerationStamp()) { memoizedIsInitialized = 0; return false; if (!hasRequestedChecksum()) { memoizedIsInitialized = 0; return false; if (!getHeader().isInitialized()) {
// Exact duplicate of the hashCode() fragment above.
hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasHeader()) { hash = (37 * hash) + HEADER_FIELD_NUMBER; hash = (53 * hash) + getHeader().hashCode(); if (getTargetsCount() > 0) { hash = (37 * hash) + TARGETS_FIELD_NUMBER; hash = (53 * hash) + getTargetsList().hashCode(); if (hasSource()) { hash = (37 * hash) + SOURCE_FIELD_NUMBER; hash = (53 * hash) + getSource().hashCode(); if (hasStage()) { hash = (37 * hash) + STAGE_FIELD_NUMBER; hash = (53 * hash) + hashEnum(getStage()); if (hasPipelineSize()) { hash = (37 * hash) + PIPELINESIZE_FIELD_NUMBER; hash = (53 * hash) + getPipelineSize(); if (hasMinBytesRcvd()) { hash = (37 * hash) + MINBYTESRCVD_FIELD_NUMBER; hash = (53 * hash) + hashLong(getMinBytesRcvd()); if (hasMaxBytesRcvd()) { hash = (37 * hash) + MAXBYTESRCVD_FIELD_NUMBER; hash = (53 * hash) + hashLong(getMaxBytesRcvd());
// writeTo() fragment using the unshaded com.google.protobuf runtime;
// identical to the shaded variant above apart from the parameter type.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { getSerializedSize(); if (((bitField0_ & 0x00000001) == 0x00000001)) { output.writeMessage(1, header_); output.writeBool(15, targetPinnings_.get(i)); getUnknownFields().writeTo(output);
// Opcode dispatch target for OP_WRITE_BLOCK: parses a vint-length-prefixed
// OpWriteBlockProto from the stream, converts the proto fields to internal
// types via PBHelper/fromProto, and forwards everything to writeBlock(...).
// Optional proto fields get explicit fallbacks at the call site:
// cachingStrategy -> CachingStrategy.newDefaultStrategy(),
// allowLazyPersist and pinning -> false.
// The trace span opened from the request header (may be null) is closed in
// the finally block.  NOTE: this method is duplicated verbatim on the next
// line of this file.
/** Receive OP_WRITE_BLOCK */ private void opWriteBlock(DataInputStream in) throws IOException { final OpWriteBlockProto proto = OpWriteBlockProto.parseFrom(vintPrefixed(in)); final DatanodeInfo[] targets = PBHelper.convert(proto.getTargetsList()); TraceScope traceScope = continueTraceSpan(proto.getHeader(), proto.getClass().getSimpleName()); try { writeBlock(PBHelper.convert(proto.getHeader().getBaseHeader().getBlock()), PBHelper.convertStorageType(proto.getStorageType()), PBHelper.convert(proto.getHeader().getBaseHeader().getToken()), proto.getHeader().getClientName(), targets, PBHelper.convertStorageTypes(proto.getTargetStorageTypesList(), targets.length), PBHelper.convert(proto.getSource()), fromProto(proto.getStage()), proto.getPipelineSize(), proto.getMinBytesRcvd(), proto.getMaxBytesRcvd(), proto.getLatestGenerationStamp(), fromProto(proto.getRequestedChecksum()), (proto.hasCachingStrategy() ? getCachingStrategy(proto.getCachingStrategy()) : CachingStrategy.newDefaultStrategy()), (proto.hasAllowLazyPersist() ? proto.getAllowLazyPersist() : false), (proto.hasPinning() ? proto.getPinning(): false), (PBHelper.convertBooleanList(proto.getTargetPinningsList()))); } finally { if (traceScope != null) traceScope.close(); } }
/** Receive OP_WRITE_BLOCK */
private void opWriteBlock(DataInputStream in) throws IOException {
  // Parse the vint-length-prefixed request proto from the wire.
  final OpWriteBlockProto proto = OpWriteBlockProto.parseFrom(vintPrefixed(in));
  final DatanodeInfo[] targets = PBHelper.convert(proto.getTargetsList());
  TraceScope traceScope =
      continueTraceSpan(proto.getHeader(), proto.getClass().getSimpleName());
  try {
    // Convert each proto field to its internal representation and hand the
    // whole request off to writeBlock(); optional fields fall back to their
    // defaults (default caching strategy, false for both boolean flags).
    writeBlock(
        PBHelper.convert(proto.getHeader().getBaseHeader().getBlock()),
        PBHelper.convertStorageType(proto.getStorageType()),
        PBHelper.convert(proto.getHeader().getBaseHeader().getToken()),
        proto.getHeader().getClientName(),
        targets,
        PBHelper.convertStorageTypes(proto.getTargetStorageTypesList(), targets.length),
        PBHelper.convert(proto.getSource()),
        fromProto(proto.getStage()),
        proto.getPipelineSize(),
        proto.getMinBytesRcvd(),
        proto.getMaxBytesRcvd(),
        proto.getLatestGenerationStamp(),
        fromProto(proto.getRequestedChecksum()),
        proto.hasCachingStrategy()
            ? getCachingStrategy(proto.getCachingStrategy())
            : CachingStrategy.newDefaultStrategy(),
        proto.hasAllowLazyPersist() && proto.getAllowLazyPersist(),
        proto.hasPinning() && proto.getPinning(),
        PBHelper.convertBooleanList(proto.getTargetPinningsList()));
  } finally {
    if (traceScope != null) {
      traceScope.close();
    }
  }
}
// Fragment of the PBHelperClient-based variant of opWriteBlock: the method
// signature begins before this view and the trailing close braces are cut
// off.  Same decode-and-dispatch shape as the PBHelper version above, but it
// additionally passes storageId and the targetStorageIds list (as a String
// array) through to writeBlock.
final OpWriteBlockProto proto = OpWriteBlockProto.parseFrom(vintPrefixed(in)); final DatanodeInfo[] targets = PBHelperClient.convert(proto.getTargetsList()); TraceScope traceScope = continueTraceSpan(proto.getHeader(), proto.getClass().getSimpleName()); try { writeBlock(PBHelperClient.convert(proto.getHeader().getBaseHeader().getBlock()), PBHelperClient.convertStorageType(proto.getStorageType()), PBHelperClient.convert(proto.getHeader().getBaseHeader().getToken()), proto.getHeader().getClientName(), targets, PBHelperClient.convertStorageTypes(proto.getTargetStorageTypesList(), targets.length), PBHelperClient.convert(proto.getSource()), fromProto(proto.getStage()), proto.getPipelineSize(), proto.getMinBytesRcvd(), proto.getMaxBytesRcvd(), proto.getLatestGenerationStamp(), fromProto(proto.getRequestedChecksum()), (proto.hasCachingStrategy() ? getCachingStrategy(proto.getCachingStrategy()) : CachingStrategy.newDefaultStrategy()), (proto.hasAllowLazyPersist() ? proto.getAllowLazyPersist() : false), (proto.hasPinning() ? proto.getPinning(): false), (PBHelperClient.convertBooleanList(proto.getTargetPinningsList())), proto.getStorageId(), proto.getTargetStorageIdsList().toArray(new String[0])); } finally { if (traceScope != null) traceScope.close();
/**
 * Sends an OP_WRITE_BLOCK request over the channel: a 2-byte data-transfer
 * version, the 1-byte opcode, then the varint-delimited OpWriteBlockProto.
 *
 * @param channel netty channel to the target datanode
 * @param storageType storage type stamped onto the request via
 *        STORAGE_TYPE_SETTER before building
 * @param writeBlockProtoBuilder partially populated request builder
 * @throws IOException if serializing the proto into the buffer fails
 */
private static void requestWriteBlock(Channel channel, Enum<?> storageType,
    OpWriteBlockProto.Builder writeBlockProtoBuilder) throws IOException {
  OpWriteBlockProto proto =
      STORAGE_TYPE_SETTER.set(writeBlockProtoBuilder, storageType).build();
  int protoLen = proto.getSerializedSize();
  // 2 bytes version + 1 byte opcode + varint length prefix + proto payload.
  ByteBuf buffer = channel.alloc()
      .buffer(3 + CodedOutputStream.computeRawVarint32Size(protoLen) + protoLen);
  try {
    buffer.writeShort(DataTransferProtocol.DATA_TRANSFER_VERSION);
    buffer.writeByte(Op.WRITE_BLOCK.code);
    proto.writeDelimitedTo(new ByteBufOutputStream(buffer));
    ByteBuf toSend = buffer;
    // Ownership of the buffer passes to the channel on writeAndFlush; null
    // out our reference so the finally block does not double-release it.
    buffer = null;
    channel.writeAndFlush(toSend);
  } finally {
    if (buffer != null) {
      // BUGFIX: previously the allocated ByteBuf leaked if writeDelimitedTo
      // (or the header writes) threw before writeAndFlush took ownership.
      buffer.release();
    }
  }
}