@java.lang.Override
public int hashCode() {
  // Return the cached hash when it was already computed; 0 means "not yet".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = h * 19 + getDescriptorForType().hashCode();
  // Optional fields contribute only when set: field number first, then value.
  if (hasBaseHeader()) {
    h = h * 37 + BASEHEADER_FIELD_NUMBER;
    h = h * 53 + getBaseHeader().hashCode();
  }
  if (hasClientName()) {
    h = h * 37 + CLIENTNAME_FIELD_NUMBER;
    h = h * 53 + getClientName().hashCode();
  }
  // Unknown fields participate so messages differing only in unknown
  // fields hash differently, matching equals().
  h = h * 29 + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
// NOTE(review): fragment cut mid-expression — the builder chain continues
// past this view. Copies the block so the byte count can be set without
// mutating the LocatedBlock's own ExtendedBlock, then starts building the
// ClientOperationHeaderProto from that copy and the block access token.
ExtendedBlock blockCopy = new ExtendedBlock(locatedBlock.getBlock()); blockCopy.setNumBytes(locatedBlock.getBlockSize()); ClientOperationHeaderProto header = ClientOperationHeaderProto.newBuilder() .setBaseHeader(BaseHeaderProto.newBuilder().setBlock(PB_HELPER.convert(blockCopy)) .setToken(PB_HELPER.convert(locatedBlock.getBlockToken())))
/**
 * Builds a ClientOperationHeaderProto from this builder's current state
 * without enforcing that required fields are set.
 */
public org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto buildPartial() {
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto result =
      new org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto(this);
  final int fromBits = bitField0_;
  int toBits = 0;
  // Field 1 (baseHeader): propagate the has-bit, then take the value from
  // the nested builder when one is attached, otherwise the raw field.
  if ((fromBits & 0x00000001) != 0) {
    toBits |= 0x00000001;
  }
  result.baseHeader_ =
      (baseHeaderBuilder_ == null) ? baseHeader_ : baseHeaderBuilder_.build();
  // Field 2 (clientName): propagate the has-bit and copy the value.
  if ((fromBits & 0x00000002) != 0) {
    toBits |= 0x00000002;
  }
  result.clientName_ = clientName_;
  result.bitField0_ = toBits;
  onBuilt();
  return result;
}
// NOTE(review): fragment cut mid-method — standard protobuf equals()
// pattern: compare the optional 'header' field only when present; the
// closing braces lie outside this view.
if (hasHeader()) { result = result && getHeader() .equals(other.getHeader());
// NOTE(review): fragment cut mid-method — standard protobuf equals()
// pattern: compare the optional 'header' field only when present; the
// closing braces lie outside this view.
if (hasHeader()) { result = result && getHeader() .equals(other.getHeader());
// NOTE(review): fragment cut mid-method — standard protobuf equals()
// pattern: compare the optional 'header' field only when present; the
// closing braces lie outside this view.
if (hasHeader()) { result = result && getHeader() .equals(other.getHeader());
// NOTE(review): fragment cut mid-method — start of a generated
// Builder.clear() that resets the nested ClientOperationHeaderProto
// 'header' field (default instance when no sub-builder is attached,
// otherwise clearing the sub-builder); remainder not visible here.
public Builder clear() { super.clear(); if (headerBuilder_ == null) { header_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto.getDefaultInstance(); } else { headerBuilder_.clear();
// NOTE(review): fragment cut mid-method — start of a generated
// Builder.clear() that resets the nested ClientOperationHeaderProto
// 'header' field (default instance when no sub-builder is attached,
// otherwise clearing the sub-builder); remainder not visible here.
public Builder clear() { super.clear(); if (headerBuilder_ == null) { header_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto.getDefaultInstance(); } else { headerBuilder_.clear();
// NOTE(review): fragment cut mid-method — start of a generated
// Builder.clear() that resets the nested ClientOperationHeaderProto
// 'header' field (default instance when no sub-builder is attached,
// otherwise clearing the sub-builder); remainder not visible here.
public Builder clear() { super.clear(); if (headerBuilder_ == null) { header_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto.getDefaultInstance(); } else { headerBuilder_.clear();
// NOTE(review): fragment cut mid-method — standard protobuf equals()
// pattern: compare the optional 'header' field only when present; the
// closing braces lie outside this view.
if (hasHeader()) { result = result && getHeader() .equals(other.getHeader());
// NOTE(review): fragment cut mid-method — standard protobuf equals()
// pattern: compare the optional 'header' field only when present; the
// closing braces lie outside this view.
if (hasHeader()) { result = result && getHeader() .equals(other.getHeader());
// NOTE(review): fragment cut mid-method — standard protobuf equals()
// pattern: compare the optional 'header' field only when present; the
// closing braces lie outside this view.
if (hasHeader()) { result = result && getHeader() .equals(other.getHeader());
/**
 * Receive OP_WRITE_BLOCK: parse the length-prefixed request proto from the
 * stream, attach/continue the trace span carried in its header, and hand
 * the decoded arguments to writeBlock(). The span is always closed, even
 * when writeBlock throws.
 */
private void opWriteBlock(DataInputStream in) throws IOException {
  final OpWriteBlockProto request = OpWriteBlockProto.parseFrom(vintPrefixed(in));
  final DatanodeInfo[] nodes = PBHelper.convert(request.getTargetsList());
  TraceScope scope =
      continueTraceSpan(request.getHeader(), request.getClass().getSimpleName());
  try {
    writeBlock(
        PBHelper.convert(request.getHeader().getBaseHeader().getBlock()),
        PBHelper.convertStorageType(request.getStorageType()),
        PBHelper.convert(request.getHeader().getBaseHeader().getToken()),
        request.getHeader().getClientName(),
        nodes,
        PBHelper.convertStorageTypes(request.getTargetStorageTypesList(), nodes.length),
        PBHelper.convert(request.getSource()),
        fromProto(request.getStage()),
        request.getPipelineSize(),
        request.getMinBytesRcvd(),
        request.getMaxBytesRcvd(),
        request.getLatestGenerationStamp(),
        fromProto(request.getRequestedChecksum()),
        request.hasCachingStrategy()
            ? getCachingStrategy(request.getCachingStrategy())
            : CachingStrategy.newDefaultStrategy(),
        // hasX() && getX() is equivalent to the original hasX() ? getX() : false
        request.hasAllowLazyPersist() && request.getAllowLazyPersist(),
        request.hasPinning() && request.getPinning(),
        PBHelper.convertBooleanList(request.getTargetPinningsList()));
  } finally {
    if (scope != null) {
      scope.close();
    }
  }
}
// Parses the length-prefixed OpWriteBlockProto from the stream, continues
// the trace span carried in its header, and forwards every decoded field to
// writeBlock(). Optional proto fields (cachingStrategy, allowLazyPersist,
// pinning) fall back to defaults when absent; the try/finally guarantees the
// trace span is closed even if writeBlock throws.
/** Receive OP_WRITE_BLOCK */ private void opWriteBlock(DataInputStream in) throws IOException { final OpWriteBlockProto proto = OpWriteBlockProto.parseFrom(vintPrefixed(in)); final DatanodeInfo[] targets = PBHelper.convert(proto.getTargetsList()); TraceScope traceScope = continueTraceSpan(proto.getHeader(), proto.getClass().getSimpleName()); try { writeBlock(PBHelper.convert(proto.getHeader().getBaseHeader().getBlock()), PBHelper.convertStorageType(proto.getStorageType()), PBHelper.convert(proto.getHeader().getBaseHeader().getToken()), proto.getHeader().getClientName(), targets, PBHelper.convertStorageTypes(proto.getTargetStorageTypesList(), targets.length), PBHelper.convert(proto.getSource()), fromProto(proto.getStage()), proto.getPipelineSize(), proto.getMinBytesRcvd(), proto.getMaxBytesRcvd(), proto.getLatestGenerationStamp(), fromProto(proto.getRequestedChecksum()), (proto.hasCachingStrategy() ? getCachingStrategy(proto.getCachingStrategy()) : CachingStrategy.newDefaultStrategy()), (proto.hasAllowLazyPersist() ? proto.getAllowLazyPersist() : false), (proto.hasPinning() ? proto.getPinning(): false), (PBHelper.convertBooleanList(proto.getTargetPinningsList()))); } finally { if (traceScope != null) traceScope.close(); } }
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Identity fast path.
  if (obj == this) {
    return true;
  }
  // Non-OpTransferBlockProto values defer to the superclass comparison,
  // matching generated-protobuf semantics.
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpTransferBlockProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpTransferBlockProto other =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.OpTransferBlockProto) obj;
  // Optional 'header': presence must match, and values must match when present.
  if (hasHeader() != other.hasHeader()) {
    return false;
  }
  if (hasHeader() && !getHeader().equals(other.getHeader())) {
    return false;
  }
  if (!getTargetsList().equals(other.getTargetsList())) {
    return false;
  }
  if (!getTargetStorageTypesList().equals(other.getTargetStorageTypesList())) {
    return false;
  }
  if (!getTargetStorageIdsList().equals(other.getTargetStorageIdsList())) {
    return false;
  }
  // Unknown fields take part in equality, consistent with hashCode().
  return getUnknownFields().equals(other.getUnknownFields());
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Identity fast path.
  if (obj == this) {
    return true;
  }
  // Non-ClientOperationHeaderProto values defer to the superclass
  // comparison, matching generated-protobuf semantics.
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto other =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto) obj;
  // Optional fields: presence must match, and values must match when present.
  if (hasBaseHeader() != other.hasBaseHeader()) {
    return false;
  }
  if (hasBaseHeader() && !getBaseHeader().equals(other.getBaseHeader())) {
    return false;
  }
  if (hasClientName() != other.hasClientName()) {
    return false;
  }
  if (hasClientName() && !getClientName().equals(other.getClientName())) {
    return false;
  }
  // Unknown fields take part in equality, consistent with hashCode().
  return getUnknownFields().equals(other.getUnknownFields());
}
// Generated protobuf equals(): identity fast path, superclass comparison for
// foreign types, then field-by-field comparison — optional fields compare
// presence first and value only when present; unknown fields participate so
// equality stays consistent with hashCode().
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto)) { return super.equals(obj); } org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto other = (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto) obj; boolean result = true; result = result && (hasBaseHeader() == other.hasBaseHeader()); if (hasBaseHeader()) { result = result && getBaseHeader() .equals(other.getBaseHeader()); } result = result && (hasClientName() == other.hasClientName()); if (hasClientName()) { result = result && getClientName() .equals(other.getClientName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; }
/**
 * Resets every field of this builder to its proto-declared default and
 * clears all has-bits; returns this builder for chaining.
 */
public Builder clear() {
  super.clear();
  // header (field 1): clear the attached sub-builder when present,
  // otherwise fall back to the default instance.
  if (headerBuilder_ != null) {
    headerBuilder_.clear();
  } else {
    header_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto.getDefaultInstance();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  // offset (field 2)
  offset_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000002);
  // len (field 3)
  len_ = 0L;
  bitField0_ = (bitField0_ & ~0x00000004);
  // sendChecksums (field 4): proto default is true, not false.
  sendChecksums_ = true;
  bitField0_ = (bitField0_ & ~0x00000008);
  // cachingStrategy (field 5): same sub-builder handling as header.
  if (cachingStrategyBuilder_ != null) {
    cachingStrategyBuilder_.clear();
  } else {
    cachingStrategy_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto.getDefaultInstance();
  }
  bitField0_ = (bitField0_ & ~0x00000010);
  return this;
}
// Generated protobuf equals(): identity fast path, superclass comparison for
// foreign types, then field-by-field comparison — optional fields compare
// presence first and value only when present; unknown fields participate so
// equality stays consistent with hashCode().
@java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto)) { return super.equals(obj); } org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto other = (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto) obj; boolean result = true; result = result && (hasBaseHeader() == other.hasBaseHeader()); if (hasBaseHeader()) { result = result && getBaseHeader() .equals(other.getBaseHeader()); } result = result && (hasClientName() == other.hasClientName()); if (hasClientName()) { result = result && getClientName() .equals(other.getClientName()); } result = result && getUnknownFields().equals(other.getUnknownFields()); return result; }
// Generated Builder.clear(): resets each field to its proto default (nested
// message fields reset via their sub-builder when one is attached, otherwise
// to the default instance; note sendChecksums_ defaults to true) and clears
// the corresponding has-bit in bitField0_ after each field. Returns this
// builder for chaining.
public Builder clear() { super.clear(); if (headerBuilder_ == null) { header_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.ClientOperationHeaderProto.getDefaultInstance(); } else { headerBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); offset_ = 0L; bitField0_ = (bitField0_ & ~0x00000002); len_ = 0L; bitField0_ = (bitField0_ & ~0x00000004); sendChecksums_ = true; bitField0_ = (bitField0_ & ~0x00000008); if (cachingStrategyBuilder_ == null) { cachingStrategy_ = org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.CachingStrategyProto.getDefaultInstance(); } else { cachingStrategyBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000010); return this; }