diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java index 2d1914f1625..4d8d7d7a8a7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirMkdirOp.java @@ -66,7 +66,7 @@ static HdfsFileStatus mkdirs(FSNamesystem fsn, String src, } if (!createParent) { - fsd.verifyParentDir(iip, src); + fsd.verifyParentDir(iip); } // validate that we have enough inodes. This is, at best, a diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirSymlinkOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirSymlinkOp.java index 6938a848487..71362f8be6d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirSymlinkOp.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirSymlinkOp.java @@ -58,7 +58,7 @@ static HdfsFileStatus createSymlinkInt( iip = fsd.resolvePathForWrite(pc, link, false); link = iip.getPath(); if (!createParent) { - fsd.verifyParentDir(iip, link); + fsd.verifyParentDir(iip); } if (!fsd.isValidToCreate(link, iip)) { throw new IOException( diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java index 40be83b0dbd..aab0f763e7a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java @@ -323,7 +323,7 @@ static INodesInPath 
resolvePathForStartFile(FSDirectory dir, } } else { if (!createParent) { - dir.verifyParentDir(iip, src); + dir.verifyParentDir(iip); } if (!flag.contains(CreateFlag.CREATE)) { throw new FileNotFoundException("Can't overwrite non-existent " + src); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java index 8456da61811..a059ee56bfb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirectory.java @@ -1765,17 +1765,16 @@ HdfsFileStatus getAuditFileInfo(INodesInPath iip) /** * Verify that parent directory of src exists. */ - void verifyParentDir(INodesInPath iip, String src) + void verifyParentDir(INodesInPath iip) throws FileNotFoundException, ParentNotDirectoryException { - Path parent = new Path(src).getParent(); - if (parent != null) { + if (iip.length() > 2) { final INode parentNode = iip.getINode(-2); if (parentNode == null) { throw new FileNotFoundException("Parent directory doesn't exist: " - + parent); - } else if (!parentNode.isDirectory() && !parentNode.isSymlink()) { + + iip.getParentPath()); + } else if (!parentNode.isDirectory()) { throw new ParentNotDirectoryException("Parent path is not a directory: " - + parent); + + iip.getParentPath()); } } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSDirectory.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSDirectory.java index 2b43c0f9536..071bdf7e4a6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSDirectory.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSDirectory.java @@ -20,6 
+20,7 @@ import java.io.BufferedReader; +import java.io.FileNotFoundException; import java.io.IOException; import java.io.StringReader; import java.util.EnumSet; @@ -30,6 +31,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.fs.XAttrSetFlag; @@ -386,4 +388,50 @@ public void testXAttrMultiAddRemoveErrors() throws Exception { XAttrSetFlag.REPLACE)); verifyXAttrsPresent(newXAttrs, 4); } + + @Test + public void testVerifyParentDir() throws Exception { + hdfs.mkdirs(new Path("/dir1/dir2")); + hdfs.createNewFile(new Path("/dir1/file")); + hdfs.createNewFile(new Path("/dir1/dir2/file")); + + INodesInPath iip = fsdir.resolvePath(null, "/"); + fsdir.verifyParentDir(iip); + + iip = fsdir.resolvePath(null, "/dir1"); + fsdir.verifyParentDir(iip); + + iip = fsdir.resolvePath(null, "/dir1/file"); + fsdir.verifyParentDir(iip); + + iip = fsdir.resolvePath(null, "/dir-nonexist/file"); + try { + fsdir.verifyParentDir(iip); + fail("expected FNF"); + } catch (FileNotFoundException fnf) { + // expected. + } + + iip = fsdir.resolvePath(null, "/dir1/dir2"); + fsdir.verifyParentDir(iip); + + iip = fsdir.resolvePath(null, "/dir1/dir2/file"); + fsdir.verifyParentDir(iip); + + iip = fsdir.resolvePath(null, "/dir1/dir-nonexist/file"); + try { + fsdir.verifyParentDir(iip); + fail("expected FNF"); + } catch (FileNotFoundException fnf) { + // expected. + } + + iip = fsdir.resolvePath(null, "/dir1/file/fail"); + try { + fsdir.verifyParentDir(iip); + fail("expected ParentNotDirectoryException"); + } catch (ParentNotDirectoryException pnd) { + // expected. + } + } }