diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt
index 4c567902c86..4d27ae99d44 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES_HDFS-5535.txt
@@ -71,3 +71,5 @@ HDFS-5535 subtasks:
 
     HDFS-5992. Fix NPE in MD5FileUtils and update editsStored for
     TestOfflineEditsViewer. (szetszwo)
+
+    HDFS-5994. Fix TestDataNodeRollingUpgrade. (Arpit Agarwal via szetszwo)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceStorage.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceStorage.java
index 43d51cd1a64..74dc4cf8df4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceStorage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/BlockPoolSliceStorage.java
@@ -392,11 +392,12 @@ public class BlockPoolSliceStorage extends Storage {
       if (child.isDirectory()) {
         // Recurse to process subdirectories.
         filesRestored += restoreBlockFilesFromTrash(child);
+        continue;
       }
 
       if (restoreDirectory == null) {
         restoreDirectory = new File(getRestoreDirectory(child));
-        if (!restoreDirectory.mkdirs()) {
+        if (!restoreDirectory.exists() && !restoreDirectory.mkdirs()) {
           throw new IOException("Failed to create directory " + restoreDirectory);
         }
       }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java
index b53f6ee373a..d80f7292090 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/FsDatasetAsyncDiskService.java
@@ -196,9 +196,8 @@ class FsDatasetAsyncDiskService {
     }
 
     private boolean moveFiles() {
-      File newBlockFile = new File(trashDirectory, blockFile.getName());
-      File newMetaFile = new File(trashDirectory, metaFile.getName());
-      if (!new File(trashDirectory).mkdirs()) {
+      File trashDirFile = new File(trashDirectory);
+      if (!trashDirFile.exists() && !trashDirFile.mkdirs()) {
         LOG.error("Failed to create trash directory " + trashDirectory);
         return false;
       }
@@ -207,6 +206,9 @@ class FsDatasetAsyncDiskService {
         LOG.debug("Moving files " + blockFile.getName() + " and " +
             metaFile.getName() + " to trash.");
       }
+
+      File newBlockFile = new File(trashDirectory, blockFile.getName());
+      File newMetaFile = new File(trashDirectory, metaFile.getName());
       return (blockFile.renameTo(newBlockFile) &&
           metaFile.renameTo(newMetaFile));
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeRollingUpgrade.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeRollingUpgrade.java
index 1ae37105d4a..77908d9129a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeRollingUpgrade.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeRollingUpgrade.java
@@ -46,7 +46,8 @@ public class TestDataNodeRollingUpgrade {
   private static final Log LOG =
       LogFactory.getLog(TestDataNodeRollingUpgrade.class);
   private static final short REPL_FACTOR = 1;
-  private static final long FILE_SIZE = 1024L;
+  private static final int BLOCK_SIZE = 1024 * 1024;
+  private static final long FILE_SIZE = BLOCK_SIZE * 4;
   private static final long SEED = 0x1BADF00DL;
 
   Configuration conf;
@@ -139,7 +140,7 @@ public class TestDataNodeRollingUpgrade {
     Path testFile1 = new Path("/TestDataNodeRollingUpgrade1.dat");
 
     // Create files in DFS.
-    DFSTestUtil.createFile(fs, testFile1, FILE_SIZE, REPL_FACTOR, SEED);
+    DFSTestUtil.createFile(fs, testFile1, BLOCK_SIZE, BLOCK_SIZE, FILE_SIZE, REPL_FACTOR, SEED);
     String fileContents1 = DFSTestUtil.readFile(fs, testFile1);
 
     startRollingUpgrade();