@Override
protected void channelRead0(ChannelHandlerContext ctx, PipelineAckProto ack) throws Exception {
  Status reply = getStatus(ack);
  // Test the restart OOB status FIRST. Restart statuses are a subset of the
  // non-SUCCESS statuses, so checking "reply != SUCCESS" before this (as the
  // previous ordering did) made the restart branch unreachable and reported a
  // misleading "Bad response" message for a datanode restart notification.
  if (PipelineAck.isRestartOOBStatus(reply)) {
    failed(ctx.channel(), () -> new IOException("Restart response " + reply + " for block " + block
        + " from datanode " + ctx.channel().remoteAddress()));
    return;
  }
  // Any other non-success status is a hard failure of this pipeline member.
  if (reply != Status.SUCCESS) {
    failed(ctx.channel(), () -> new IOException("Bad response " + reply + " for block " + block
        + " from datanode " + ctx.channel().remoteAddress()));
    return;
  }
  // Heartbeat acks carry no write progress; ignore them (they only keep the
  // connection alive).
  if (ack.getSeqno() == HEART_BEAT_SEQNO) {
    return;
  }
  // A successful, non-heartbeat ack: mark this channel's outstanding ack done.
  completed(ctx.channel());
}
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto.getDefaultInstance()) return this; if (other.hasSeqno()) { setSeqno(other.getSeqno());
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto.getDefaultInstance()) return this; if (other.hasSeqno()) { setSeqno(other.getSeqno());
public Builder mergeFrom(org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto other) { if (other == org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto.getDefaultInstance()) return this; if (other.hasSeqno()) { setSeqno(other.getSeqno());
@java.lang.Override
public int hashCode() {
  // Hash is expensive for repeated map lookups; reuse the memoized value once
  // it has been computed (0 doubles as the "not yet computed" sentinel).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  // Optional scalar fields only contribute when present; repeated fields only
  // contribute when non-empty — mirrors the generated-code hashing contract.
  if (hasSeqno()) {
    h = (37 * h) + SEQNO_FIELD_NUMBER;
    h = (53 * h) + hashLong(getSeqno());
  }
  if (getReplyCount() > 0) {
    h = (37 * h) + REPLY_FIELD_NUMBER;
    h = (53 * h) + hashEnumList(getReplyList());
  }
  if (hasDownstreamAckTimeNanos()) {
    h = (37 * h) + DOWNSTREAMACKTIMENANOS_FIELD_NUMBER;
    h = (53 * h) + hashLong(getDownstreamAckTimeNanos());
  }
  if (getFlagCount() > 0) {
    h = (37 * h) + FLAG_FIELD_NUMBER;
    h = (53 * h) + getFlagList().hashCode();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto other =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto) obj;
  // Guard-clause form of the generated field-by-field comparison: bail out on
  // the first mismatch instead of accumulating into a boolean.
  if (hasSeqno() != other.hasSeqno()) {
    return false;
  }
  if (hasSeqno() && getSeqno() != other.getSeqno()) {
    return false;
  }
  if (!getReplyList().equals(other.getReplyList())) {
    return false;
  }
  if (hasDownstreamAckTimeNanos() != other.hasDownstreamAckTimeNanos()) {
    return false;
  }
  if (hasDownstreamAckTimeNanos()
      && getDownstreamAckTimeNanos() != other.getDownstreamAckTimeNanos()) {
    return false;
  }
  if (!getFlagList().equals(other.getFlagList())) {
    return false;
  }
  // Messages also compare equal only when their unknown-field sets match.
  return getUnknownFields().equals(other.getUnknownFields());
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto other =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto) obj;
  // Compare field-by-field, returning early on the first difference; the
  // short-circuit order matches the original accumulated-boolean version.
  if (hasSeqno() != other.hasSeqno()) {
    return false;
  }
  if (hasSeqno() && getSeqno() != other.getSeqno()) {
    return false;
  }
  if (!getReplyList().equals(other.getReplyList())) {
    return false;
  }
  if (hasDownstreamAckTimeNanos() != other.hasDownstreamAckTimeNanos()) {
    return false;
  }
  if (hasDownstreamAckTimeNanos()
      && getDownstreamAckTimeNanos() != other.getDownstreamAckTimeNanos()) {
    return false;
  }
  if (!getFlagList().equals(other.getFlagList())) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
@java.lang.Override
public int hashCode() {
  // Serve the cached hash when available (0 means "not computed yet").
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  if (hasSeqno()) {
    h = (37 * h) + SEQNO_FIELD_NUMBER;
    h = (53 * h) + hashLong(getSeqno());
  }
  if (getReplyCount() > 0) {
    h = (37 * h) + REPLY_FIELD_NUMBER;
    h = (53 * h) + hashEnumList(getReplyList());
  }
  if (hasDownstreamAckTimeNanos()) {
    h = (37 * h) + DOWNSTREAMACKTIMENANOS_FIELD_NUMBER;
    h = (53 * h) + hashLong(getDownstreamAckTimeNanos());
  }
  if (getFlagCount() > 0) {
    h = (37 * h) + FLAG_FIELD_NUMBER;
    h = (53 * h) + getFlagList().hashCode();
  }
  // Unknown fields participate in the hash so it stays consistent with equals.
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
@java.lang.Override
public int hashCode() {
  // Fast path: hash was already computed and memoized.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int h = 41;
  h = (19 * h) + getDescriptorForType().hashCode();
  // Present optional fields and non-empty repeated fields are folded in using
  // the field number followed by the field value, per the generated contract.
  if (hasSeqno()) {
    h = (37 * h) + SEQNO_FIELD_NUMBER;
    h = (53 * h) + hashLong(getSeqno());
  }
  if (getReplyCount() > 0) {
    h = (37 * h) + REPLY_FIELD_NUMBER;
    h = (53 * h) + hashEnumList(getReplyList());
  }
  if (hasDownstreamAckTimeNanos()) {
    h = (37 * h) + DOWNSTREAMACKTIMENANOS_FIELD_NUMBER;
    h = (53 * h) + hashLong(getDownstreamAckTimeNanos());
  }
  if (getFlagCount() > 0) {
    h = (37 * h) + FLAG_FIELD_NUMBER;
    h = (53 * h) + getFlagList().hashCode();
  }
  h = (29 * h) + getUnknownFields().hashCode();
  memoizedHashCode = h;
  return h;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto)) {
    return super.equals(obj);
  }
  org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto other =
      (org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.PipelineAckProto) obj;
  // Early-return rewrite of the generated accumulate-and-test comparison; the
  // evaluation order of the getters is unchanged.
  if (hasSeqno() != other.hasSeqno()) {
    return false;
  }
  if (hasSeqno() && getSeqno() != other.getSeqno()) {
    return false;
  }
  if (!getReplyList().equals(other.getReplyList())) {
    return false;
  }
  if (hasDownstreamAckTimeNanos() != other.hasDownstreamAckTimeNanos()) {
    return false;
  }
  if (hasDownstreamAckTimeNanos()
      && getDownstreamAckTimeNanos() != other.getDownstreamAckTimeNanos()) {
    return false;
  }
  if (!getFlagList().equals(other.getFlagList())) {
    return false;
  }
  return getUnknownFields().equals(other.getUnknownFields());
}
/**
 * Returns the sequence number carried by this pipeline ack.
 *
 * @return the ack's sequence number
 */
public long getSeqno() {
  return proto.getSeqno();
}
/**
 * Accessor for the sequence number of this ack, delegated to the wrapped
 * protobuf message.
 *
 * @return the sequence number
 */
public long getSeqno() {
  return proto.getSeqno();
}
/**
 * Gets the sequence number from the underlying {@code PipelineAckProto}.
 *
 * @return the sequence number of this ack
 */
public long getSeqno() {
  return proto.getSeqno();
}