HDFS-16121. Iterative snapshot diff report can generate duplicate records for creates, deletes and renames. (#3188)

(cherry picked from commit 6450c1ea9a)
This commit is contained in:
bshashikant 2021-07-09 10:26:42 +05:30 committed by S O'Donnell
parent e3fb63f33f
commit ab939814a3
2 changed files with 57 additions and 1 deletions

View File

@@ -116,7 +116,7 @@ boolean addDirDiff(long dirId, byte[][] parent, ChildrenDiff diff) {
if (lastIndex == -1 || lastIndex >= clist.size()) {
final List<INode> dlist = diff.getDeletedUnmodifiable();
int size = dlist.size();
int size = clist.size();
ListIterator<INode> iterator = lastIndex != -1 ?
dlist.listIterator(lastIndex - size): dlist.listIterator();
while (iterator.hasNext()) {
@@ -130,6 +130,10 @@ boolean addDirDiff(long dirId, byte[][] parent, ChildrenDiff diff) {
deletedList.add(e);
} else {
setLastPath(parent);
// the offset will be set to created list + iterator index in the
// deleted list so that it points to the exact entry in the deleted
// list post checking the created list in the next iteration of rpc
// call
setLastIndex(size + iterator.nextIndex());
return false;
}

View File

@@ -1400,6 +1400,58 @@ public void testDiffReportWithRpcLimit2() throws Exception {
DFSUtil.string2Bytes("dir3/file3")));
}
/**
 * Verifies the snapshot diff report when the maximum number of
 * SnapshotDiffReportEntries returned per rpc call is limited to 3,
 * with a mix of created and deleted files in the same directory.
 *
 * @throws Exception if cluster operations or diff verification fail
 */
@Test
public void testDiffReportWithRpcLimit3() throws Exception {
  final Path root = new Path("/");
  hdfs.mkdirs(root);
  final Path dir = new Path(root, "dir1");
  hdfs.mkdirs(dir);
  // Populate dir1 with file1..file4 before the first snapshot.
  for (int i = 1; i <= 4; i++) {
    DFSTestUtil.createFile(hdfs, new Path(dir, "file" + i), BLOCKSIZE,
        REPLICATION, SEED);
  }
  SnapshotTestHelper.createSnapshot(hdfs, root, "s0");
  // Delete the original files and create file5..file10, so the resulting
  // diff (2 modifies, 6 creates, 4 deletes) spans several rpc responses.
  for (int i = 1; i <= 4; i++) {
    hdfs.delete(new Path(dir, "file" + i), false);
  }
  for (int i = 5; i <= 10; i++) {
    DFSTestUtil.createFile(hdfs, new Path(dir, "file" + i), BLOCKSIZE,
        REPLICATION, SEED);
  }
  SnapshotTestHelper.createSnapshot(hdfs, root, "s1");
  // The iterative report must contain each entry exactly once, in
  // modify/create/delete order, with no duplicates across rpc calls.
  verifyDiffReport(root, "s0", "s1",
      new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("")),
      new DiffReportEntry(DiffType.MODIFY, DFSUtil.string2Bytes("dir1")),
      new DiffReportEntry(DiffType.CREATE,
          DFSUtil.string2Bytes("dir1/file5")),
      new DiffReportEntry(DiffType.CREATE,
          DFSUtil.string2Bytes("dir1/file6")),
      new DiffReportEntry(DiffType.CREATE,
          DFSUtil.string2Bytes("dir1/file7")),
      new DiffReportEntry(DiffType.CREATE,
          DFSUtil.string2Bytes("dir1/file8")),
      new DiffReportEntry(DiffType.CREATE,
          DFSUtil.string2Bytes("dir1/file9")),
      new DiffReportEntry(DiffType.CREATE,
          DFSUtil.string2Bytes("dir1/file10")),
      new DiffReportEntry(DiffType.DELETE,
          DFSUtil.string2Bytes("dir1/file1")),
      new DiffReportEntry(DiffType.DELETE,
          DFSUtil.string2Bytes("dir1/file2")),
      new DiffReportEntry(DiffType.DELETE,
          DFSUtil.string2Bytes("dir1/file3")),
      new DiffReportEntry(DiffType.DELETE,
          DFSUtil.string2Bytes("dir1/file4")));
}
private void verifyDiffReportForGivenReport(Path dirPath, String from,
String to, SnapshotDiffReport report, DiffReportEntry... entries)
throws IOException {