diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 5175e8229c9..805a7c00e44 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -401,6 +401,9 @@ Release 2.5.0 - UNRELEASED HDFS-6552. add DN storage to a BlockInfo will not replace the different storage from same DN. (Amir Langer via Arpit Agarwal) + HDFS-6551. Rename with OVERWRITE option may throw NPE when the target + file/directory is a reference INode. (jing9) + BREAKDOWN OF HDFS-2006 SUBTASKS AND RELATED JIRAS HDFS-6299. Protobuf for XAttr and client-side implementation. (Yi Liu via umamahesh) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java index d0d255c271a..6879f34f95d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java @@ -44,7 +44,6 @@ import org.apache.hadoop.fs.XAttrSetFlag; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.AclStatus; -import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.DFSConfigKeys; @@ -892,9 +891,10 @@ boolean unprotectedRenameTo(String src, String dst, long timestamp, boolean undoRemoveDst = false; INode removedDst = null; + long removedNum = 0; try { if (dstInode != null) { // dst exists remove it - if (removeLastINode(dstIIP) != -1) { + if ((removedNum = removeLastINode(dstIIP)) != -1) { removedDst = dstIIP.getLastINode(); undoRemoveDst = true; } @@ -934,13 +934,15 @@ boolean unprotectedRenameTo(String src, String dst, long timestamp, long filesDeleted = -1; if 
(removedDst != null) { undoRemoveDst = false; - BlocksMapUpdateInfo collectedBlocks = new BlocksMapUpdateInfo(); - List<INode> removedINodes = new ChunkedArrayList<INode>(); - filesDeleted = removedDst.cleanSubtree(Snapshot.CURRENT_STATE_ID, - dstIIP.getLatestSnapshotId(), collectedBlocks, removedINodes, true) - .get(Quota.NAMESPACE); - getFSNamesystem().removePathAndBlocks(src, collectedBlocks, - removedINodes); + if (removedNum > 0) { + BlocksMapUpdateInfo collectedBlocks = new BlocksMapUpdateInfo(); + List<INode> removedINodes = new ChunkedArrayList<INode>(); + filesDeleted = removedDst.cleanSubtree(Snapshot.CURRENT_STATE_ID, + dstIIP.getLatestSnapshotId(), collectedBlocks, removedINodes, + true).get(Quota.NAMESPACE); + getFSNamesystem().removePathAndBlocks(src, collectedBlocks, + removedINodes); + } } if (snapshottableDirs.size() > 0) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java index baa6edbb329..c88aaf2410e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java @@ -171,8 +171,6 @@ public void testRenameFromSDir2NonSDir() throws Exception { private static boolean existsInDiffReport(List<DiffReportEntry> entries, DiffType type, String relativePath) { for (DiffReportEntry entry : entries) { - System.out.println("DiffEntry is:" + entry.getType() + "\"" + new String(entry.getRelativePath()) + "\""); if ((entry.getType() == type) && ((new String(entry.getRelativePath())).compareTo(relativePath) == 0)) { return true; @@ -2374,4 +2372,46 @@ public void testAppendFileAfterRenameInSnapshot() throws Exception { // save namespace and restart restartClusterAndCheckImage(true); } + + @Test + 
public void testRenameWithOverWrite() throws Exception { + final Path root = new Path("/"); + final Path foo = new Path(root, "foo"); + final Path file1InFoo = new Path(foo, "file1"); + final Path file2InFoo = new Path(foo, "file2"); + final Path file3InFoo = new Path(foo, "file3"); + DFSTestUtil.createFile(hdfs, file1InFoo, 1L, REPL, SEED); + DFSTestUtil.createFile(hdfs, file2InFoo, 1L, REPL, SEED); + DFSTestUtil.createFile(hdfs, file3InFoo, 1L, REPL, SEED); + final Path bar = new Path(root, "bar"); + hdfs.mkdirs(bar); + + SnapshotTestHelper.createSnapshot(hdfs, root, "s0"); + // move file1 from foo to bar + final Path fileInBar = new Path(bar, "file1"); + hdfs.rename(file1InFoo, fileInBar); + // rename bar to newDir + final Path newDir = new Path(root, "newDir"); + hdfs.rename(bar, newDir); + // move file2 from foo to newDir + final Path file2InNewDir = new Path(newDir, "file2"); + hdfs.rename(file2InFoo, file2InNewDir); + // move file3 from foo to newDir and rename it to file1, this will overwrite + // the original file1 + final Path file1InNewDir = new Path(newDir, "file1"); + hdfs.rename(file3InFoo, file1InNewDir, Rename.OVERWRITE); + SnapshotTestHelper.createSnapshot(hdfs, root, "s1"); + + SnapshotDiffReport report = hdfs.getSnapshotDiffReport(root, "s0", "s1"); + LOG.info("DiffList is \n\"" + report.toString() + "\""); + List<DiffReportEntry> entries = report.getDiffList(); + assertEquals(7, entries.size()); + assertTrue(existsInDiffReport(entries, DiffType.MODIFY, "")); + assertTrue(existsInDiffReport(entries, DiffType.MODIFY, foo.getName())); + assertTrue(existsInDiffReport(entries, DiffType.DELETE, bar.getName())); + assertTrue(existsInDiffReport(entries, DiffType.CREATE, newDir.getName())); + assertTrue(existsInDiffReport(entries, DiffType.DELETE, "foo/file1")); + assertTrue(existsInDiffReport(entries, DiffType.DELETE, "foo/file2")); + assertTrue(existsInDiffReport(entries, DiffType.DELETE, "foo/file3")); + } }