diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
index 30561da0a4c..72b85c90fee 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.HDFS-2802.txt
@@ -103,3 +103,6 @@ Branch-2802 Snapshot (Unreleased)
 
   HDFS-4395. In INodeDirectorySnapshottable's constructor, the passed-in dir
   could be an INodeDirectoryWithSnapshot. (Jing Zhao via szetszwo)
+
+  HDFS-4397. Fix a bug in INodeDirectoryWithSnapshot.Diff.combinePostDiff(..)
+  that it may put the wrong node into the deleted list. (szetszwo)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index d7f226f6489..8566c49584e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -17,7 +17,6 @@
  */
 package org.apache.hadoop.hdfs.server.namenode;
 
-import java.io.IOException;
 import java.io.PrintWriter;
 import java.io.StringWriter;
 import java.util.Arrays;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
index 19c86e855d8..afbf494551a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
@@ -211,7 +211,7 @@ public class INodeFile extends INode implements BlockCollection {
   }
 
   @Override
-  protected int collectSubtreeBlocksAndClear(BlocksMapUpdateInfo info) {
+  public int collectSubtreeBlocksAndClear(BlocksMapUpdateInfo info) {
     parent = null;
     if(blocks != null && info != null) {
       for (BlockInfo blk : blocks) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
index ac90ac4c8e8..0a37ae04a2e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectorySnapshottable.java
@@ -242,7 +242,6 @@ public class INodeDirectorySnapshottable extends INodeDirectoryWithSnapshot {
       Snapshot snapshot) {
     super.dumpTreeRecursively(out, prefix, snapshot);
 
-    try {
     if (snapshot == null) {
       out.println();
       out.print(prefix);
@@ -296,8 +295,5 @@ public class INodeDirectorySnapshottable extends INodeDirectoryWithSnapshot {
         }
       });
     }
-    } catch(Exception e) {
-      throw new RuntimeException("this=" + this, e);
-    }
   }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectoryWithSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectoryWithSnapshot.java
index d2fcfe4858e..2e6b3598fd2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectoryWithSnapshot.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/INodeDirectoryWithSnapshot.java
@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.server.namenode.INode;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectory;
 import org.apache.hadoop.hdfs.server.namenode.INodeDirectoryWithQuota;
+import org.apache.hadoop.hdfs.server.namenode.INodeFile;
 import org.apache.hadoop.hdfs.util.ReadOnlyList;
 
 import com.google.common.annotations.VisibleForTesting;
@@ -58,7 +59,7 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
    * 1.1. create i in current: add it to c-list (c, 0)
    * 1.1.1. create i in current and then create: impossible
    * 1.1.2. create i in current and then delete: remove it from c-list (0, 0)
-   * 1.1.3. create i in current and then modify: replace it in c-list (c, 0)
+   * 1.1.3. create i in current and then modify: replace it in c-list (c', 0)
    *
    * 1.2. delete i from current: impossible
    *
@@ -75,7 +76,7 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
    * 2.3. modify i in current: put it in both c-list and d-list (c, d)
    * 2.3.1. modify i in current and then create: impossible
    * 2.3.2. modify i in current and then delete: remove it from c-list (0, d)
-   * 2.3.3. modify i in current and then modify: replace it in c-list (c, d)
+   * 2.3.3. modify i in current and then modify: replace it in c-list (c', d)
    *
   */
  static class Diff {
@@ -192,8 +193,11 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
       INode previous = null;
       Integer d = null;
       if (c >= 0) {
-        // Case 1.1.3: inode is already in c-list,
+        // Case 1.1.3 and 2.3.3: inode is already in c-list,
         previous = created.set(c, newinode);
+
+        //TODO: fix a bug that previous != oldinode. Set it to oldinode for now
+        previous = oldinode;
       } else {
         d = search(deleted, oldinode);
         if (d < 0) {
@@ -322,38 +326,23 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
      * Note that after this function the postDiff will be deleted.
      *
      * @param the posterior diff to combine
-     * @param collectedBlocks Used in case 2.3, 3.1, and 3.3 to collect
-     *        information for blocksMap update
+     * @param deletedINodeProcesser Used in case 2.1, 2.3, 3.1, and 3.3
+     *        to process the deleted inodes.
      */
-    void combinePostDiff(Diff postDiff, BlocksMapUpdateInfo collectedBlocks) {
+    void combinePostDiff(Diff postDiff, Processor deletedINodeProcesser) {
       while (postDiff.created != null && !postDiff.created.isEmpty()) {
-        INode node = postDiff.created.remove(postDiff.created.size() - 1);
-        int deletedIndex = search(postDiff.deleted, node);
+        final INode c = postDiff.created.remove(postDiff.created.size() - 1);
+        final int deletedIndex = search(postDiff.deleted, c);
         if (deletedIndex < 0) {
-          // for case 1
-          create(node);
+          // case 1
+          create(c);
         } else {
+          final INode d = postDiff.deleted.remove(deletedIndex);
           // case 3
-          int createdIndex = search(created, node);
-          if (createdIndex < 0) {
-            // 3.4
-            create(node);
-            insertDeleted(node, search(deleted, node));
-          } else {
-            // 3.1 and 3.3
-            created.set(createdIndex, node);
-            // for 3.1 and 3.3, if the node is an INodeFileWithLink, need to
-            // remove d in the posterior diff from the circular list, also do
-            // possible block deletion and blocksMap updating
-            INode dInPost = postDiff.deleted.get(deletedIndex);
-            if (dInPost instanceof INodeFileWithLink) {
-              // dInPost must be an INodeFileWithLink
-              ((INodeFileWithLink) dInPost)
-                  .collectSubtreeBlocksAndClear(collectedBlocks);
-            }
+          final Triple triple = modify(d, c);
+          if (deletedINodeProcesser != null) {
+            deletedINodeProcesser.process(triple.middle);
           }
-          // also remove the inode from the deleted list
-          postDiff.deleted.remove(deletedIndex);
         }
       }
 
@@ -361,12 +350,8 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
         // case 2
         INode node = postDiff.deleted.remove(postDiff.deleted.size() - 1);
         Triple triple = delete(node);
-        // for 2.3, if the node is an INodeFileWithLink, need to remove c' from
-        // the circular list
-        INode cInCurrent = triple.middle;
-        if (cInCurrent instanceof INodeFileWithLink) {
-          ((INodeFileWithLink) cInCurrent)
-              .collectSubtreeBlocksAndClear(collectedBlocks);
+        if (deletedINodeProcesser != null) {
+          deletedINodeProcesser.process(triple.middle);
         }
       }
     }
@@ -487,10 +472,12 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
 
     private List initChildren() {
       if (children == null) {
-        final ReadOnlyList posterior = posteriorDiff != null?
-            posteriorDiff.getChildrenList()
-            : INodeDirectoryWithSnapshot.this.getChildrenList(null);
-        children = diff.apply2Current(ReadOnlyList.Util.asList(posterior));
+        final Diff combined = new Diff();
+        for(SnapshotDiff d = SnapshotDiff.this; d != null; d = d.posteriorDiff) {
+          combined.combinePostDiff(d.diff, null);
+        }
+        children = combined.apply2Current(ReadOnlyList.Util.asList(
+            INodeDirectoryWithSnapshot.this.getChildrenList(null)));
       }
       return children;
     }
@@ -542,6 +529,12 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
     }
   }
 
+  /** An interface for passing a method to process inodes. */
+  static interface Processor {
+    /** Process the given inode. */
+    void process(INode inode);
+  }
+
   /** Create an {@link INodeDirectoryWithSnapshot} with the given snapshot.*/
   public static INodeDirectoryWithSnapshot newInstance(INodeDirectory dir,
       Snapshot latest) {
@@ -576,7 +569,7 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
    * Null if the snapshot with the given name does not exist.
    */
   SnapshotDiff deleteSnapshotDiff(Snapshot snapshot,
-      BlocksMapUpdateInfo collectedBlocks) {
+      final BlocksMapUpdateInfo collectedBlocks) {
     int snapshotIndex = Collections.binarySearch(diffs, snapshot);
     if (snapshotIndex == -1) {
       return null;
@@ -586,7 +579,16 @@ public class INodeDirectoryWithSnapshot extends INodeDirectoryWithQuota {
     if (snapshotIndex > 0) {
       // combine the to-be-removed diff with its previous diff
       SnapshotDiff previousDiff = diffs.get(snapshotIndex - 1);
-      previousDiff.diff.combinePostDiff(diffToRemove.diff, collectedBlocks);
+      previousDiff.diff.combinePostDiff(diffToRemove.diff, new Processor() {
+        /** Collect blocks for deleted files. */
+        @Override
+        public void process(INode inode) {
+          if (inode != null && inode instanceof INodeFile) {
+            ((INodeFile)inode).collectSubtreeBlocksAndClear(collectedBlocks);
+          }
+        }
+      });
+
       previousDiff.posteriorDiff = diffToRemove.posteriorDiff;
       diffToRemove.posteriorDiff = null;
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestINodeDirectoryWithSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestINodeDirectoryWithSnapshot.java
index 3b3660d3af0..c71e31a4ea8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestINodeDirectoryWithSnapshot.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestINodeDirectoryWithSnapshot.java
@@ -48,7 +48,7 @@ public class TestINodeDirectoryWithSnapshot {
   public void testDiff() throws Exception {
     for(int startSize = 0; startSize <= 1000; startSize = nextStep(startSize)) {
       for(int m = 0; m <= 10000; m = nextStep(m)) {
-        runDiffTest(startSize, m, true);
+        runDiffTest(startSize, m);
       }
     }
   }
@@ -68,7 +68,7 @@ public class TestINodeDirectoryWithSnapshot {
    * @param numModifications
    * @param computeDiff
    */
-  void runDiffTest(int startSize, int numModifications, boolean computeDiff) {
+  void runDiffTest(int startSize, int numModifications) {
     final int width = findWidth(startSize + numModifications);
     System.out.println("\nstartSize=" + startSize
         + ", numModifications=" + numModifications
@@ -83,9 +83,15 @@ public class TestINodeDirectoryWithSnapshot {
 
     // make modifications to current and record the diff
     final List current = new ArrayList(previous);
-    final Diff diff = computeDiff? new Diff(): null;
+
+    final Diff[] diffs = new Diff[5];
+    for(int j = 0; j < diffs.length; j++) {
+      diffs[j] = new Diff();
+    }
 
     for(int m = 0; m < numModifications; m++) {
+      final int j = m * diffs.length / numModifications;
+
       // if current is empty, the next operation must be create;
       // otherwise, randomly pick an operation.
       final int nextOperation = current.isEmpty()? 1: RANDOM.nextInt(3) + 1;
@@ -93,55 +99,85 @@ public class TestINodeDirectoryWithSnapshot {
       case 1: // create
       {
         final INode i = newINode(n++, width);
-        create(i, current, diff);
+        create(i, current, diffs[j]);
         break;
       }
       case 2: // delete
       {
         final INode i = current.get(RANDOM.nextInt(current.size()));
-        delete(i, current, diff);
+        delete(i, current, diffs[j]);
         break;
       }
       case 3: // modify
       {
         final INode i = current.get(RANDOM.nextInt(current.size()));
-        modify(i, current, diff);
+        modify(i, current, diffs[j]);
         break;
       }
       }
     }
 
-    if (computeDiff) {
-      // check if current == previous + diff
-      final List c = diff.apply2Previous(previous);
+    {
+      // check if current == previous + diffs
+      List c = previous;
+      for(int i = 0; i < diffs.length; i++) {
+        c = diffs[i].apply2Previous(c);
+      }
       if (!hasIdenticalElements(current, c)) {
         System.out.println("previous = " + Diff.toString(previous));
         System.out.println();
         System.out.println("current = " + Diff.toString(current));
         System.out.println("c = " + Diff.toString(c));
-        System.out.println();
-        System.out.println("diff = " + diff);
         throw new AssertionError("current and c are not identical.");
       }
 
-      // check if previous == current - diff
-      final List p = diff.apply2Current(current);
+      // check if previous == current - diffs
+      List p = current;
+      for(int i = diffs.length - 1; i >= 0; i--) {
+        p = diffs[i].apply2Current(p);
+      }
       if (!hasIdenticalElements(previous, p)) {
         System.out.println("previous = " + Diff.toString(previous));
         System.out.println("p = " + Diff.toString(p));
         System.out.println();
         System.out.println("current = " + Diff.toString(current));
-        System.out.println();
-        System.out.println("diff = " + diff);
         throw new AssertionError("previous and p are not identical.");
       }
     }
 
-    if (computeDiff) {
+    // combine all diffs
+    final Diff combined = diffs[0];
+    for(int i = 1; i < diffs.length; i++) {
+      combined.combinePostDiff(diffs[i], null);
+    }
+
+    {
+      // check if current == previous + combined
+      final List c = combined.apply2Previous(previous);
+      if (!hasIdenticalElements(current, c)) {
+        System.out.println("previous = " + Diff.toString(previous));
+        System.out.println();
+        System.out.println("current = " + Diff.toString(current));
+        System.out.println("c = " + Diff.toString(c));
+        throw new AssertionError("current and c are not identical.");
+      }
+
+      // check if previous == current - combined
+      final List p = combined.apply2Current(current);
+      if (!hasIdenticalElements(previous, p)) {
+        System.out.println("previous = " + Diff.toString(previous));
+        System.out.println("p = " + Diff.toString(p));
+        System.out.println();
+        System.out.println("current = " + Diff.toString(current));
+        throw new AssertionError("previous and p are not identical.");
+      }
+    }
+
+    {
       for(int m = 0; m < n; m++) {
         final INode inode = newINode(m, width);
         {// test accessPrevious
-          final INode[] array = diff.accessPrevious(inode.getLocalNameBytes());
+          final INode[] array = combined.accessPrevious(inode.getLocalNameBytes());
           final INode computed;
           if (array != null) {
             computed = array[0];
@@ -157,7 +193,7 @@ public class TestINodeDirectoryWithSnapshot {
         }
 
         {// test accessCurrent
-          final INode[] array = diff.accessCurrent(inode.getLocalNameBytes());
+          final INode[] array = combined.accessCurrent(inode.getLocalNameBytes());
           final INode computed;
           if (array != null) {
             computed = array[0];
@@ -239,8 +275,6 @@ public class TestINodeDirectoryWithSnapshot {
 
   static void delete(INode inode, final List current, Diff diff) {
     final int i = Diff.search(current, inode);
-    Assert.assertTrue("i=" + i + ", inode=" + inode + "\ncurrent=" + current,
-        i >= 0);
     current.remove(i);
     if (diff != null) {
       //test undo with 1/UNDO_TEST_P probability
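Illustration (not part of the patch): the following is a minimal, self-contained sketch of the created/deleted-list ("c-list"/"d-list") bookkeeping that Diff.combinePostDiff folds together, and of why combining a posterior diff must not let a node that the earlier diff created leak into the combined deleted list, which is the flavor of mistake HDFS-4397 fixes. The names SimpleDiff, apply, and combineWith are hypothetical, the modify cases (an entry appearing in both lists) are not modeled, and the real Diff works on sorted lists of INodes rather than a set of names.

// SimpleDiff.java - a minimal sketch (NOT the HDFS classes) of the c-list/d-list idea.
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Set;

class SimpleDiff {
  final Set<String> created = new LinkedHashSet<String>();
  final Set<String> deleted = new LinkedHashSet<String>();

  /** previous + diff = current: drop the deleted names, then add the created names. */
  List<String> apply(List<String> previous) {
    final List<String> current = new ArrayList<String>(previous);
    current.removeAll(deleted);
    current.addAll(created);
    return current;
  }

  /** Fold a posterior diff into this one so that apply(..) spans both periods. */
  void combineWith(SimpleDiff posterior) {
    for (String name : posterior.deleted) {
      // A posterior deletion of a name this diff created cancels the creation;
      // only otherwise does the deletion belong in the combined deleted list.
      if (!created.remove(name)) {
        deleted.add(name);
      }
    }
    created.addAll(posterior.created);
  }

  public static void main(String[] args) {
    final List<String> previous = Arrays.asList("a", "b");

    final SimpleDiff d1 = new SimpleDiff();  // first period: create c
    d1.created.add("c");
    final SimpleDiff d2 = new SimpleDiff();  // second period: delete c, create d
    d2.deleted.add("c");
    d2.created.add("d");

    final List<String> current = d2.apply(d1.apply(previous));

    d1.combineWith(d2);  // analogous to combining a posterior diff
    // The combined diff must reproduce the same current state, and "c" must not
    // end up in the deleted list.
    System.out.println(current.equals(d1.apply(previous)));  // true
    System.out.println(d1.deleted.contains("c"));            // false
  }
}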