HDFS-3943. QJM: remove currently-unused md5sum field. Contributed by Todd Lipcon.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-3077@1386863 13f79535-47bb-0310-9956-ffa450edef68
parent 853db9ec24
commit 83c14fbd24
CHANGES.HDFS-3077.txt
@@ -74,3 +74,5 @@ HDFS-3840. JournalNodes log JournalNotFormattedException backtrace error before
 HDFS-3894. QJM: testRecoverAfterDoubleFailures can be flaky due to IPC client caching (todd)
 
 HDFS-3926. QJM: Add user documentation for QJM. (atm)
+
+HDFS-3943. QJM: remove currently-unused md5sum field (todd)

SegmentRecoveryComparator.java
@@ -71,10 +71,8 @@ class SegmentRecoveryComparator
       }
 
       if (!r1Seg.getIsInProgress()) {
-        // If both are finalized, they should match lengths, and be considered
-        // equal
-        if (r1Seg.getEndTxId() != r2Seg.getEndTxId() ||
-            !r1Seg.getMd5Sum().equals(r2Seg.getMd5Sum())) {
+        // If both are finalized, they should match lengths
+        if (r1Seg.getEndTxId() != r2Seg.getEndTxId()) {
           throw new AssertionError("finalized segs with different lengths: " +
               r1 + ", " + r2);
         }

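With the md5sum comparison gone, the consistency check for finalized segments reduces to an end-txid comparison. A minimal standalone sketch of that rule, using a hypothetical stand-in type rather than the real generated SegmentStateProto class:

// SegView is a hypothetical stand-in for the two SegmentStateProto fields
// the check still consults; it is not an actual HDFS class.
final class SegView {
  final boolean inProgress;
  final long endTxId;
  SegView(boolean inProgress, long endTxId) {
    this.inProgress = inProgress;
    this.endTxId = endTxId;
  }
}

final class FinalizedSegCheck {
  // Mirrors the hunk above: two finalized copies of the same segment must
  // agree on length; no checksum is consulted any more.
  static void check(SegView r1Seg, SegView r2Seg) {
    if (!r1Seg.inProgress && !r2Seg.inProgress
        && r1Seg.endTxId != r2Seg.endTxId) {
      throw new AssertionError("finalized segs with different lengths: "
          + r1Seg.endTxId + ", " + r2Seg.endTxId);
    }
  }

  public static void main(String[] args) {
    check(new SegView(false, 100L), new SegView(false, 100L)); // passes
    check(new SegView(false, 100L), new SegView(false, 101L)); // throws
  }
}
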
Journal.java
@@ -59,7 +59,6 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Stopwatch;
 import com.google.common.collect.Range;
 import com.google.common.collect.Ranges;
-import com.google.protobuf.ByteString;
 import com.google.protobuf.TextFormat;
 
 /**

@@ -628,7 +627,6 @@ class Journal implements Closeable {
         .setStartTxId(segmentTxId)
         .setEndTxId(elf.getLastTxId())
         .setIsInProgress(elf.isInProgress())
-        .setMd5Sum(ByteString.EMPTY) // TODO
         .build();
     LOG.info("getSegmentInfo(" + segmentTxId + "): " + elf + " -> " +
         TextFormat.shortDebugString(ret));

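The line removed here was the only writer of the md5sum field, and it only ever supplied a placeholder (note the removed TODO), which is what makes the field "currently-unused" and safe to drop. A sketch of the resulting construction, with names taken from the hunk above (segmentTxId and elf are locals in the surrounding method, so this is not a self-contained snippet):

// Post-change builder chain: the segment info no longer carries a checksum.
SegmentStateProto ret = SegmentStateProto.newBuilder()
    .setStartTxId(segmentTxId)
    .setEndTxId(elf.getLastTxId())
    .setIsInProgress(elf.isInProgress())
    .build();
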
@@ -655,8 +653,7 @@ class Journal implements Closeable {
 
     if (previouslyAccepted != null && !hasFinalizedSegment) {
       SegmentStateProto acceptedState = previouslyAccepted.getSegmentState();
-      assert acceptedState.getEndTxId() == segInfo.getEndTxId() &&
-          acceptedState.getMd5Sum().equals(segInfo.getMd5Sum()) :
+      assert acceptedState.getEndTxId() == segInfo.getEndTxId() :
          "prev accepted: " + TextFormat.shortDebugString(previouslyAccepted)+ "\n" +
          "on disk: " + TextFormat.shortDebugString(segInfo);
 

QJournalProtocol.proto
@@ -44,7 +44,6 @@ message SegmentStateProto {
   required uint64 startTxId = 1;
   required uint64 endTxId = 2;
   required bool isInProgress = 3;
-  required bytes md5sum = 4;
 }
 
 /**

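After the removal the message carries only the transaction range and the in-progress flag:

message SegmentStateProto {
  required uint64 startTxId = 1;
  required uint64 endTxId = 2;
  required bool isInProgress = 3;
}

Deleting a required protobuf field is a wire-incompatible change; it is presumably acceptable here only because this protocol lives on the unreleased HDFS-3077 feature branch. That rationale is an inference, not something stated in the commit.
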
TestSegmentRecoveryComparator.java
@@ -28,7 +28,6 @@ import org.junit.Test;
 import org.mockito.Mockito;
 
 import com.google.common.collect.Maps;
-import com.google.protobuf.ByteString;
 
 import static org.apache.hadoop.hdfs.qjournal.client.SegmentRecoveryComparator.INSTANCE;
 

@@ -46,7 +45,6 @@ public class TestSegmentRecoveryComparator {
         .setSegmentState(SegmentStateProto.newBuilder()
             .setStartTxId(1L)
             .setEndTxId(3L)
-            .setMd5Sum(ByteString.EMPTY)
             .setIsInProgress(true))
         .setLastWriterEpoch(0L)
         .build());

@@ -55,7 +53,6 @@ public class TestSegmentRecoveryComparator {
         .setSegmentState(SegmentStateProto.newBuilder()
             .setStartTxId(1L)
             .setEndTxId(4L)
-            .setMd5Sum(ByteString.EMPTY)
             .setIsInProgress(true))
         .setLastWriterEpoch(0L)
         .build());

@@ -64,7 +61,6 @@ public class TestSegmentRecoveryComparator {
         .setSegmentState(SegmentStateProto.newBuilder()
             .setStartTxId(1L)
             .setEndTxId(4L)
-            .setMd5Sum(ByteString.EMPTY)
             .setIsInProgress(true))
         .setLastWriterEpoch(0L)
         .setAcceptedInEpoch(1L)

@@ -75,7 +71,6 @@ public class TestSegmentRecoveryComparator {
         .setSegmentState(SegmentStateProto.newBuilder()
             .setStartTxId(1L)
             .setEndTxId(3L)
-            .setMd5Sum(ByteString.EMPTY)
             .setIsInProgress(false))
         .setLastWriterEpoch(0L)
         .build());

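All four test fixtures lose the same placeholder line. A hedged sketch of a helper that would build such a fixture after the change; PrepareRecoveryResponseProto as the enclosing message is an assumption based on the setSegmentState/setLastWriterEpoch/setAcceptedInEpoch calls, since the hunks never name it:

// Hypothetical helper; the real test constructs each response inline.
private static PrepareRecoveryResponseProto makeResponse(
    long startTxId, long endTxId, boolean inProgress, long lastWriterEpoch) {
  return PrepareRecoveryResponseProto.newBuilder()
      .setSegmentState(SegmentStateProto.newBuilder()
          .setStartTxId(startTxId)
          .setEndTxId(endTxId)
          .setIsInProgress(inProgress))
      .setLastWriterEpoch(lastWriterEpoch)
      .build();
}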