if (bestResponse.hasAcceptedInEpoch()) { LOG.info("Using already-accepted recovery for segment " + "starting at txid " + segmentTxId + ": " +
// Generated by the protocol buffer compiler — do not hand-edit; regenerate from the .proto instead.
// Computes and memoizes a hash over the descriptor plus every set optional field (segmentState,
// acceptedInEpoch, lastWriterEpoch, lastCommittedTxId) and the unknown-field set, using the
// field-number/value mixing scheme that mirrors the generated equals().
// NOTE(review): if the computed hash happens to be 0 it is never cached (the `!= 0` guard treats
// 0 as "not yet computed") and is recomputed on every call — a known, benign quirk of the
// protobuf-generated memoization pattern.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSegmentState()) { hash = (37 * hash) + SEGMENTSTATE_FIELD_NUMBER; hash = (53 * hash) + getSegmentState().hashCode(); } if (hasAcceptedInEpoch()) { hash = (37 * hash) + ACCEPTEDINEPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getAcceptedInEpoch()); } if (hasLastWriterEpoch()) { hash = (37 * hash) + LASTWRITEREPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastWriterEpoch()); } if (hasLastCommittedTxId()) { hash = (37 * hash) + LASTCOMMITTEDTXID_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastCommittedTxId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
.equals(other.getSegmentState()); result = result && (hasAcceptedInEpoch() == other.hasAcceptedInEpoch()); if (hasAcceptedInEpoch()) { result = result && (getAcceptedInEpoch() == other.getAcceptedInEpoch());
if (bestResponse.hasAcceptedInEpoch()) { LOG.info("Using already-accepted recovery for segment " + "starting at txid " + segmentTxId + ": " +
if (bestResponse.hasAcceptedInEpoch()) { LOG.info("Using already-accepted recovery for segment " + "starting at txid " + segmentTxId + ": " +
// NOTE(review): these four assertions are contradictory as written — hasSegmentState() is
// asserted both false and true, and hasAcceptedInEpoch() is asserted false twice on the same
// `prep` with no intervening statement. This looks like two separate test snippets (before and
// after some recovery/prepare step) fused together; confirm against the original test and
// restore the intervening call between the first and second pair of assertions.
assertFalse(prep.hasAcceptedInEpoch()); assertFalse(prep.hasSegmentState()); assertFalse(prep.hasAcceptedInEpoch()); assertTrue(prep.hasSegmentState());
.equals(other.getSegmentState()); result = result && (hasAcceptedInEpoch() == other.hasAcceptedInEpoch()); if (hasAcceptedInEpoch()) { result = result && (getAcceptedInEpoch() == other.getAcceptedInEpoch());
.equals(other.getSegmentState()); result = result && (hasAcceptedInEpoch() == other.hasAcceptedInEpoch()); if (hasAcceptedInEpoch()) { result = result && (getAcceptedInEpoch() == other.getAcceptedInEpoch());
// Generated by the protocol buffer compiler — do not hand-edit; regenerate from the .proto instead.
// Computes and memoizes a hash over the descriptor plus every set optional field (segmentState,
// acceptedInEpoch, lastWriterEpoch, lastCommittedTxId) and the unknown-field set, using the
// field-number/value mixing scheme that mirrors the generated equals().
// NOTE(review): if the computed hash happens to be 0 it is never cached (the `!= 0` guard treats
// 0 as "not yet computed") and is recomputed on every call — a known, benign quirk of the
// protobuf-generated memoization pattern.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSegmentState()) { hash = (37 * hash) + SEGMENTSTATE_FIELD_NUMBER; hash = (53 * hash) + getSegmentState().hashCode(); } if (hasAcceptedInEpoch()) { hash = (37 * hash) + ACCEPTEDINEPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getAcceptedInEpoch()); } if (hasLastWriterEpoch()) { hash = (37 * hash) + LASTWRITEREPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastWriterEpoch()); } if (hasLastCommittedTxId()) { hash = (37 * hash) + LASTCOMMITTEDTXID_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastCommittedTxId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
// Generated by the protocol buffer compiler — do not hand-edit; regenerate from the .proto instead.
// Computes and memoizes a hash over the descriptor plus every set optional field (segmentState,
// acceptedInEpoch, lastWriterEpoch, lastCommittedTxId) and the unknown-field set, using the
// field-number/value mixing scheme that mirrors the generated equals().
// NOTE(review): if the computed hash happens to be 0 it is never cached (the `!= 0` guard treats
// 0 as "not yet computed") and is recomputed on every call — a known, benign quirk of the
// protobuf-generated memoization pattern.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSegmentState()) { hash = (37 * hash) + SEGMENTSTATE_FIELD_NUMBER; hash = (53 * hash) + getSegmentState().hashCode(); } if (hasAcceptedInEpoch()) { hash = (37 * hash) + ACCEPTEDINEPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getAcceptedInEpoch()); } if (hasLastWriterEpoch()) { hash = (37 * hash) + LASTWRITEREPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastWriterEpoch()); } if (hasLastCommittedTxId()) { hash = (37 * hash) + LASTCOMMITTEDTXID_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastCommittedTxId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
// Generated by the protocol buffer compiler — do not hand-edit; regenerate from the .proto instead.
// Field-wise merge of another PrepareRecoveryResponseProto into this Builder: merging the
// default instance is a no-op; the nested segmentState message is merged recursively via
// mergeSegmentState(), while the scalar fields (acceptedInEpoch, lastWriterEpoch,
// lastCommittedTxId) are overwritten only when set on `other`; unknown fields are merged last.
public Builder mergeFrom(org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto other) { if (other == org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto.getDefaultInstance()) return this; if (other.hasSegmentState()) { mergeSegmentState(other.getSegmentState()); } if (other.hasAcceptedInEpoch()) { setAcceptedInEpoch(other.getAcceptedInEpoch()); } if (other.hasLastWriterEpoch()) { setLastWriterEpoch(other.getLastWriterEpoch()); } if (other.hasLastCommittedTxId()) { setLastCommittedTxId(other.getLastCommittedTxId()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }
// Generated by the protocol buffer compiler — do not hand-edit; regenerate from the .proto instead.
// Field-wise merge of another PrepareRecoveryResponseProto into this Builder: merging the
// default instance is a no-op; the nested segmentState message is merged recursively via
// mergeSegmentState(), while the scalar fields (acceptedInEpoch, lastWriterEpoch,
// lastCommittedTxId) are overwritten only when set on `other`; unknown fields are merged last.
public Builder mergeFrom(org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto other) { if (other == org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto.getDefaultInstance()) return this; if (other.hasSegmentState()) { mergeSegmentState(other.getSegmentState()); } if (other.hasAcceptedInEpoch()) { setAcceptedInEpoch(other.getAcceptedInEpoch()); } if (other.hasLastWriterEpoch()) { setLastWriterEpoch(other.getLastWriterEpoch()); } if (other.hasLastCommittedTxId()) { setLastCommittedTxId(other.getLastCommittedTxId()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }
// Generated by the protocol buffer compiler — do not hand-edit; regenerate from the .proto instead.
// Field-wise merge of another PrepareRecoveryResponseProto into this Builder: merging the
// default instance is a no-op; the nested segmentState message is merged recursively via
// mergeSegmentState(), while the scalar fields (acceptedInEpoch, lastWriterEpoch,
// lastCommittedTxId) are overwritten only when set on `other`; unknown fields are merged last.
public Builder mergeFrom(org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto other) { if (other == org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto.getDefaultInstance()) return this; if (other.hasSegmentState()) { mergeSegmentState(other.getSegmentState()); } if (other.hasAcceptedInEpoch()) { setAcceptedInEpoch(other.getAcceptedInEpoch()); } if (other.hasLastWriterEpoch()) { setLastWriterEpoch(other.getLastWriterEpoch()); } if (other.hasLastCommittedTxId()) { setLastCommittedTxId(other.getLastCommittedTxId()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }