// Fragment (enclosing method not visible): takes the segment reported by the best
// prepare-recovery response as the log segment to synchronize from, and asserts that
// it begins at the expected transaction id. NOTE(review): this is a Java `assert`,
// so it only fires when the JVM runs with -ea.
SegmentStateProto logToSync = bestResponse.getSegmentState(); assert segmentTxId == logToSync.getStartTxId();
// Fragment cut from a response comparator: extracts both responses' segment states,
// then (in a chained-comparison expression whose receiver is missing from this view —
// the leading ".compare(...).result()" is syntactically incomplete as shown) orders
// the two responses by their segments' endTxId. NOTE(review): looks like a Guava
// ComparisonChain — confirm against the full method; also note r1Seg/r2Seg are
// assigned but the chain re-reads getSegmentState() directly.
SegmentStateProto r1Seg = r1.getSegmentState(); SegmentStateProto r2Seg = r2.getSegmentState(); .compare(r1.getSegmentState().getEndTxId(), r2.getSegmentState().getEndTxId()) .result();
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Computes and memoizes the message hash: starts from the descriptor's hash, then for
// each optional field that is present mixes in the field number (x37) and the field
// value's hash (x53, longs via hashLong), and finally folds in unknown fields (x29).
// Memoization is safe because 0 is never stored as a computed result sentinel here;
// a benign race may recompute the same value on multiple threads.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSegmentState()) { hash = (37 * hash) + SEGMENTSTATE_FIELD_NUMBER; hash = (53 * hash) + getSegmentState().hashCode(); } if (hasAcceptedInEpoch()) { hash = (37 * hash) + ACCEPTEDINEPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getAcceptedInEpoch()); } if (hasLastWriterEpoch()) { hash = (37 * hash) + LASTWRITEREPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastWriterEpoch()); } if (hasLastCommittedTxId()) { hash = (37 * hash) + LASTCOMMITTEDTXID_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastCommittedTxId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
// Fragment (enclosing method not visible): takes the segment reported by the best
// prepare-recovery response as the log segment to synchronize from, and asserts that
// it begins at the expected transaction id. NOTE(review): this is a Java `assert`,
// so it only fires when the JVM runs with -ea.
SegmentStateProto logToSync = bestResponse.getSegmentState(); assert segmentTxId == logToSync.getStartTxId();
// Fragment (enclosing method not visible): takes the segment reported by the best
// prepare-recovery response as the log segment to synchronize from, and asserts that
// it begins at the expected transaction id. NOTE(review): this is a Java `assert`,
// so it only fires when the JVM runs with -ea.
SegmentStateProto logToSync = bestResponse.getSegmentState(); assert segmentTxId == logToSync.getStartTxId();
// Test fragment (cut mid-try/catch; the surrounding try block and the catch body are
// not visible here): accepts recovery for the prepared segment with a throwaway
// file:///dev/null URL, re-runs prepareRecovery(1L) and checks the accepted epoch and
// segment endTxId are both 1, then attempts acceptRecovery again and expects it to be
// rejected via ExecutionException — fail() fires if no exception is thrown.
// NOTE(review): presumably the channel's epoch has advanced past 1 before this point,
// making the second accept "from an earlier epoch" — confirm against the full test.
ch.acceptRecovery(prep.getSegmentState(), new URL("file:///dev/null")).get(); prep = ch.prepareRecovery(1L).get(); assertEquals(1L, prep.getAcceptedInEpoch()); assertEquals(1L, prep.getSegmentState().getEndTxId()); ch.acceptRecovery(prep.getSegmentState(), new URL("file:///dev/null")).get(); fail("accept from earlier epoch not rejected"); } catch (ExecutionException ioe) {
// Fragment cut from a response comparator: extracts both responses' segment states,
// then (in a chained-comparison expression whose receiver is missing from this view —
// the leading ".compare(...).result()" is syntactically incomplete as shown) orders
// the two responses by their segments' endTxId. NOTE(review): looks like a Guava
// ComparisonChain — confirm against the full method; also note r1Seg/r2Seg are
// assigned but the chain re-reads getSegmentState() directly.
SegmentStateProto r1Seg = r1.getSegmentState(); SegmentStateProto r2Seg = r2.getSegmentState(); .compare(r1.getSegmentState().getEndTxId(), r2.getSegmentState().getEndTxId()) .result();
// GENERATED CODE (protobuf) fragment, cut from equals(): two messages are equal on
// the segmentState field only if presence matches, and, when present, the nested
// SegmentStateProto values are equal.
result = result && (hasSegmentState() == other.hasSegmentState()); if (hasSegmentState()) { result = result && getSegmentState() .equals(other.getSegmentState());
// Fragment cut from a response comparator: extracts both responses' segment states,
// then (in a chained-comparison expression whose receiver is missing from this view —
// the leading ".compare(...).result()" is syntactically incomplete as shown) orders
// the two responses by their segments' endTxId. NOTE(review): looks like a Guava
// ComparisonChain — confirm against the full method; also note r1Seg/r2Seg are
// assigned but the chain re-reads getSegmentState() directly.
SegmentStateProto r1Seg = r1.getSegmentState(); SegmentStateProto r2Seg = r2.getSegmentState(); .compare(r1.getSegmentState().getEndTxId(), r2.getSegmentState().getEndTxId()) .result();
// GENERATED CODE (protobuf) fragment, cut from equals(): two messages are equal on
// the segmentState field only if presence matches, and, when present, the nested
// SegmentStateProto values are equal.
result = result && (hasSegmentState() == other.hasSegmentState()); if (hasSegmentState()) { result = result && getSegmentState() .equals(other.getSegmentState());
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Computes and memoizes the message hash: starts from the descriptor's hash, then for
// each optional field that is present mixes in the field number (x37) and the field
// value's hash (x53, longs via hashLong), and finally folds in unknown fields (x29).
// Memoization is safe because 0 is never stored as a computed result sentinel here;
// a benign race may recompute the same value on multiple threads.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSegmentState()) { hash = (37 * hash) + SEGMENTSTATE_FIELD_NUMBER; hash = (53 * hash) + getSegmentState().hashCode(); } if (hasAcceptedInEpoch()) { hash = (37 * hash) + ACCEPTEDINEPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getAcceptedInEpoch()); } if (hasLastWriterEpoch()) { hash = (37 * hash) + LASTWRITEREPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastWriterEpoch()); } if (hasLastCommittedTxId()) { hash = (37 * hash) + LASTCOMMITTEDTXID_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastCommittedTxId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Computes and memoizes the message hash: starts from the descriptor's hash, then for
// each optional field that is present mixes in the field number (x37) and the field
// value's hash (x53, longs via hashLong), and finally folds in unknown fields (x29).
// Memoization is safe because 0 is never stored as a computed result sentinel here;
// a benign race may recompute the same value on multiple threads.
@java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptorForType().hashCode(); if (hasSegmentState()) { hash = (37 * hash) + SEGMENTSTATE_FIELD_NUMBER; hash = (53 * hash) + getSegmentState().hashCode(); } if (hasAcceptedInEpoch()) { hash = (37 * hash) + ACCEPTEDINEPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getAcceptedInEpoch()); } if (hasLastWriterEpoch()) { hash = (37 * hash) + LASTWRITEREPOCH_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastWriterEpoch()); } if (hasLastCommittedTxId()) { hash = (37 * hash) + LASTCOMMITTEDTXID_FIELD_NUMBER; hash = (53 * hash) + hashLong(getLastCommittedTxId()); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; }
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Builder merge: no-op when `other` is the default instance; otherwise copies each
// field that is present on `other` into this builder (the nested segmentState is
// merged recursively via mergeSegmentState, scalar longs are overwritten via setters)
// and merges unknown fields. Returns `this` for chaining.
public Builder mergeFrom(org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto other) { if (other == org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto.getDefaultInstance()) return this; if (other.hasSegmentState()) { mergeSegmentState(other.getSegmentState()); } if (other.hasAcceptedInEpoch()) { setAcceptedInEpoch(other.getAcceptedInEpoch()); } if (other.hasLastWriterEpoch()) { setLastWriterEpoch(other.getLastWriterEpoch()); } if (other.hasLastCommittedTxId()) { setLastCommittedTxId(other.getLastCommittedTxId()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Memoized required-field check: the message is initialized only if the required
// lastWriterEpoch field is set and, when the optional segmentState is present, that
// nested message is itself initialized. Caches the result in memoizedIsInitialized
// (-1 = not yet computed, 0 = false, 1 = true).
public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasLastWriterEpoch()) { memoizedIsInitialized = 0; return false; } if (hasSegmentState()) { if (!getSegmentState().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; }
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Memoized required-field check: the message is initialized only if the required
// lastWriterEpoch field is set and, when the optional segmentState is present, that
// nested message is itself initialized. Caches the result in memoizedIsInitialized
// (-1 = not yet computed, 0 = false, 1 = true).
public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasLastWriterEpoch()) { memoizedIsInitialized = 0; return false; } if (hasSegmentState()) { if (!getSegmentState().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; }
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Builder merge: no-op when `other` is the default instance; otherwise copies each
// field that is present on `other` into this builder (the nested segmentState is
// merged recursively via mergeSegmentState, scalar longs are overwritten via setters)
// and merges unknown fields. Returns `this` for chaining.
public Builder mergeFrom(org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto other) { if (other == org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto.getDefaultInstance()) return this; if (other.hasSegmentState()) { mergeSegmentState(other.getSegmentState()); } if (other.hasAcceptedInEpoch()) { setAcceptedInEpoch(other.getAcceptedInEpoch()); } if (other.hasLastWriterEpoch()) { setLastWriterEpoch(other.getLastWriterEpoch()); } if (other.hasLastCommittedTxId()) { setLastCommittedTxId(other.getLastCommittedTxId()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }
// GENERATED CODE (protobuf) fragment, cut from equals(): two messages are equal on
// the segmentState field only if presence matches, and, when present, the nested
// SegmentStateProto values are equal.
result = result && (hasSegmentState() == other.hasSegmentState()); if (hasSegmentState()) { result = result && getSegmentState() .equals(other.getSegmentState());
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Memoized required-field check: the message is initialized only if the required
// lastWriterEpoch field is set and, when the optional segmentState is present, that
// nested message is itself initialized. Caches the result in memoizedIsInitialized
// (-1 = not yet computed, 0 = false, 1 = true).
public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized != -1) return isInitialized == 1; if (!hasLastWriterEpoch()) { memoizedIsInitialized = 0; return false; } if (hasSegmentState()) { if (!getSegmentState().isInitialized()) { memoizedIsInitialized = 0; return false; } } memoizedIsInitialized = 1; return true; }
// GENERATED CODE (protobuf) — do not hand-edit; regenerate from the .proto instead.
// Builder merge: no-op when `other` is the default instance; otherwise copies each
// field that is present on `other` into this builder (the nested segmentState is
// merged recursively via mergeSegmentState, scalar longs are overwritten via setters)
// and merges unknown fields. Returns `this` for chaining.
public Builder mergeFrom(org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto other) { if (other == org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.PrepareRecoveryResponseProto.getDefaultInstance()) return this; if (other.hasSegmentState()) { mergeSegmentState(other.getSegmentState()); } if (other.hasAcceptedInEpoch()) { setAcceptedInEpoch(other.getAcceptedInEpoch()); } if (other.hasLastWriterEpoch()) { setLastWriterEpoch(other.getLastWriterEpoch()); } if (other.hasLastCommittedTxId()) { setLastCommittedTxId(other.getLastCommittedTxId()); } this.mergeUnknownFields(other.getUnknownFields()); return this; }