From ab7509efe7375949db3b819b53e9461624cd2b1e Mon Sep 17 00:00:00 2001
From: Shashikant Banerjee
Date: Sun, 18 Aug 2019 19:00:38 -0700
Subject: [PATCH] HDFS-13101. Yet another fsimage corruption related to
 snapshot. Contributed by Shashikant Banerjee.

Signed-off-by: Wei-Chiu Chuang
---
 .../hadoop/hdfs/server/namenode/INode.java    |  13 +++
 .../hdfs/server/namenode/INodeDirectory.java  |   8 ++
 .../hdfs/server/namenode/INodeFile.java       |  12 +++
 .../snapshot/AbstractINodeDiffList.java       |  15 ++-
 .../DirectoryWithSnapshotFeature.java         |  23 ++--
 .../snapshot/FileWithSnapshotFeature.java     |   5 +
 .../namenode/TestFSImageWithSnapshot.java     | 100 +++++++++++++++++-
 .../namenode/snapshot/SnapshotTestHelper.java |   3 +-
 8 files changed, 169 insertions(+), 10 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
index 6c5ad2ffcf8..27c4244c226 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java
@@ -40,6 +40,7 @@ import org.apache.hadoop.hdfs.server.blockmanagement.BlockUnderConstructionFeatu
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.QuotaExceededException;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.DstReference;
+import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithCount;
 import org.apache.hadoop.hdfs.server.namenode.INodeReference.WithName;
 import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot;
 import org.apache.hadoop.hdfs.util.Diff;
@@ -657,6 +658,18 @@ public abstract class INode implements INodeAttributes, Diff.Element<byte[]> {
     return parent == null || !parent.isReference()? null: (INodeReference)parent;
   }
 
+  /**
+   * @return true if this is a reference and the reference count is 1;
+   *         otherwise, return false.
+   */
+  public boolean isLastReference() {
+    final INodeReference ref = getParentReference();
+    if (!(ref instanceof WithCount)) {
+      return false;
+    }
+    return ((WithCount)ref).getReferenceCount() == 1;
+  }
+
   /** Set parent directory */
   public final void setParent(INodeDirectory parent) {
     this.parent = parent;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
index 3b7fa4e3fa4..a42cd44155c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeDirectory.java
@@ -875,6 +875,14 @@ public class INodeDirectory extends INodeWithAdditionalFields
       prefix.setLength(prefix.length() - 2);
       prefix.append("  ");
     }
+
+    final DirectoryWithSnapshotFeature snapshotFeature =
+        getDirectoryWithSnapshotFeature();
+    if (snapshotFeature != null) {
+      out.print(prefix);
+      out.print(snapshotFeature);
+    }
+    out.println();
     dumpTreeRecursively(out, prefix, new Iterable<SnapshotAndINode>() {
       final Iterator<INode> i = getChildrenList(snapshot).iterator();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
index 418c7f4e3c5..1a349bf1688 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INodeFile.java
@@ -799,6 +799,18 @@ public class INodeFile extends INodeWithAdditionalFields
     out.print(", blocks=");
     out.print(blocks.length == 0 ? null: blocks[0]);
     out.println();
+
+    final FileWithSnapshotFeature snapshotFeature =
+        getFileWithSnapshotFeature();
+    if (snapshotFeature != null) {
+      if (prefix.length() >= 2) {
+        prefix.setLength(prefix.length() - 2);
+        prefix.append("  ");
+      }
+      out.print(prefix);
+      out.print(snapshotFeature);
+    }
+    out.println();
   }
 
   /**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java
index df709581e44..b1d9eb3bbc7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/snapshot/AbstractINodeDiffList.java
@@ -307,6 +307,19 @@ abstract class AbstractINodeDiffList
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImageWithSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImageWithSnapshot.java
+  void rename(Path src, Path dst) throws Exception {
+    printTree("Before rename " + src + " -> " + dst);
+    hdfs.rename(src, dst);
+    printTree("After rename " + src + " -> " + dst);
+  }
+
+  void createFile(Path directory, String filename) throws Exception {
+    final Path f = new Path(directory, filename);
+    DFSTestUtil.createFile(hdfs, f, 0, NUM_DATANODES, seed);
+  }
+
+  void appendFile(Path directory, String filename) throws Exception {
+    final Path f = new Path(directory, filename);
+    DFSTestUtil.appendFile(hdfs, f, "more data");
+    printTree("appended " + f);
+  }
+
+  void deleteSnapshot(Path directory, String snapshotName) throws Exception {
+    hdfs.deleteSnapshot(directory, snapshotName);
+    printTree("deleted snapshot " + snapshotName);
+  }
+
+  @Test (timeout=60000)
+  public void testDoubleRename() throws Exception {
+    final Path parent = new Path("/parent");
+    hdfs.mkdirs(parent);
+    final Path sub1 = new Path(parent, "sub1");
+    final Path sub1foo = new Path(sub1, "foo");
+    hdfs.mkdirs(sub1);
+    hdfs.mkdirs(sub1foo);
+    createFile(sub1foo, "file0");
+
+    printTree("before s0");
+    hdfs.allowSnapshot(parent);
+    hdfs.createSnapshot(parent, "s0");
+
+    createFile(sub1foo, "file1");
+    createFile(sub1foo, "file2");
+
+    final Path sub2 = new Path(parent, "sub2");
+    hdfs.mkdirs(sub2);
+    final Path sub2foo = new Path(sub2, "foo");
+    // mv /parent/sub1/foo to /parent/sub2/foo
+    rename(sub1foo, sub2foo);
+
+    hdfs.createSnapshot(parent, "s1");
+    hdfs.createSnapshot(parent, "s2");
+    printTree("created snapshots: s1, s2");
+
+    appendFile(sub2foo, "file1");
+    createFile(sub2foo, "file3");
+
+    final Path sub3 = new Path(parent, "sub3");
+    hdfs.mkdirs(sub3);
+    // mv /parent/sub2/foo to /parent/sub3/foo
+    rename(sub2foo, sub3);
+
+    hdfs.delete(sub3, true);
+    printTree("deleted " + sub3);
+
+    deleteSnapshot(parent, "s1");
+    restartCluster();
+
+    deleteSnapshot(parent, "s2");
+    restartCluster();
+  }
+
+  void restartCluster() throws Exception {
+    final File before = dumpTree2File("before.txt");
+
+    hdfs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
+    hdfs.saveNamespace();
+    hdfs.setSafeMode(SafeModeAction.SAFEMODE_LEAVE);
+
+    cluster.shutdown();
+    cluster = new MiniDFSCluster.Builder(conf).format(false)
+        .numDataNodes(NUM_DATANODES).build();
+    cluster.waitActive();
+    fsn = cluster.getNamesystem();
+    hdfs = cluster.getFileSystem();
+    final File after = dumpTree2File("after.txt");
+    SnapshotTestHelper.compareDumpedTreeInFile(before, after, true);
+  }
+
+  private final PrintWriter output = new PrintWriter(System.out, true);
+  private int printTreeCount = 0;
+
+  String printTree(String label) throws Exception {
+    output.println();
+    output.println();
+    output.println("***** " + printTreeCount++ + ": " + label);
": " + label); + final String b = + fsn.getFSDirectory().getINode("/").dumpTreeRecursively().toString(); + output.println(b); + return b; + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java index 2ea2a9853f2..93653bbc1c2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java @@ -238,8 +238,7 @@ public class SnapshotTestHelper { "\\{blockUCState=\\w+, primaryNodeIndex=[-\\d]+, replicas=\\[\\]\\}", ""); } - - assertEquals(line1, line2); + assertEquals(line1.trim(), line2.trim()); } Assert.assertNull(reader1.readLine()); Assert.assertNull(reader2.readLine());