HDFS-10980. Optimize check for existence of parent directory. Contributed by Daryn Sharp.

Kihwal Lee 2016-10-07 17:20:15 -05:00
parent f3f37e6fb8
commit e57fa81d95
5 changed files with 56 additions and 9 deletions


@@ -66,7 +66,7 @@ static HdfsFileStatus mkdirs(FSNamesystem fsn, String src,
     }
 
     if (!createParent) {
-      fsd.verifyParentDir(iip, src);
+      fsd.verifyParentDir(iip);
     }
 
     // validate that we have enough inodes. This is, at best, a


@@ -58,7 +58,7 @@ static HdfsFileStatus createSymlinkInt(
       iip = fsd.resolvePathForWrite(pc, link, false);
       link = iip.getPath();
       if (!createParent) {
-        fsd.verifyParentDir(iip, link);
+        fsd.verifyParentDir(iip);
       }
       if (!fsd.isValidToCreate(link, iip)) {
         throw new IOException(


@@ -323,7 +323,7 @@ static INodesInPath resolvePathForStartFile(FSDirectory dir,
       }
     } else {
       if (!createParent) {
-        dir.verifyParentDir(iip, src);
+        dir.verifyParentDir(iip);
       }
       if (!flag.contains(CreateFlag.CREATE)) {
         throw new FileNotFoundException("Can't overwrite non-existent " + src);


@@ -1765,17 +1765,16 @@ HdfsFileStatus getAuditFileInfo(INodesInPath iip)
   /**
    * Verify that parent directory of src exists.
    */
-  void verifyParentDir(INodesInPath iip, String src)
+  void verifyParentDir(INodesInPath iip)
       throws FileNotFoundException, ParentNotDirectoryException {
-    Path parent = new Path(src).getParent();
-    if (parent != null) {
+    if (iip.length() > 2) {
       final INode parentNode = iip.getINode(-2);
       if (parentNode == null) {
         throw new FileNotFoundException("Parent directory doesn't exist: "
-            + parent);
-      } else if (!parentNode.isDirectory() && !parentNode.isSymlink()) {
+            + iip.getParentPath());
+      } else if (!parentNode.isDirectory()) {
         throw new ParentNotDirectoryException("Parent path is not a directory: "
-            + parent);
+            + iip.getParentPath());
       }
     }
   }
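For context, the hunk above replaces a per-call "new Path(src).getParent()" string parse with a check against the components that path resolution has already produced: iip.length() > 2 means there is a parent component beyond the root, and iip.getINode(-2) is that parent. The standalone sketch below is only an illustration of that idea; ResolvedPath and Node are hypothetical stand-ins for INodesInPath and INode, and IllegalStateException stands in for ParentNotDirectoryException so the example compiles without Hadoop on the classpath.

// Minimal sketch, not HDFS source: illustrates checking the parent from
// already-resolved path components instead of re-parsing the path string.
import java.io.FileNotFoundException;

class Node {
  final String name;
  final boolean isDirectory;
  Node(String name, boolean isDirectory) {
    this.name = name;
    this.isDirectory = isDirectory;
  }
}

class ResolvedPath {
  private final String path;
  private final Node[] inodes;  // components resolved during lookup; null = missing

  ResolvedPath(String path, Node[] inodes) {
    this.path = path;
    this.inodes = inodes;
  }

  int length() { return inodes.length; }

  // Negative index counts from the end, like INodesInPath.getINode(-2).
  Node getINode(int i) { return i < 0 ? inodes[inodes.length + i] : inodes[i]; }

  String getParentPath() {
    int slash = path.lastIndexOf('/');
    return slash <= 0 ? "/" : path.substring(0, slash);
  }
}

public class VerifyParentDirSketch {
  // Mirrors the optimized check: length > 2 means root + parent + target,
  // so a non-root parent exists and is the second-to-last component.
  static void verifyParentDir(ResolvedPath iip) throws FileNotFoundException {
    if (iip.length() > 2) {
      Node parentNode = iip.getINode(-2);
      if (parentNode == null) {
        throw new FileNotFoundException(
            "Parent directory doesn't exist: " + iip.getParentPath());
      } else if (!parentNode.isDirectory) {
        throw new IllegalStateException(
            "Parent path is not a directory: " + iip.getParentPath());
      }
    }
  }

  public static void main(String[] args) throws Exception {
    Node root = new Node("", true);
    Node dir1 = new Node("dir1", true);
    Node file = new Node("file", false);

    // Parent "/dir1" exists and is a directory: passes.
    verifyParentDir(new ResolvedPath("/dir1/file", new Node[] {root, dir1, file}));

    // Parent "/dir1/file" exists but is not a directory: rejected.
    try {
      verifyParentDir(new ResolvedPath("/dir1/file/x",
          new Node[] {root, dir1, file, null}));
    } catch (IllegalStateException expected) {
      System.out.println(expected.getMessage());
    }
  }
}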


@@ -20,6 +20,7 @@
 import java.io.BufferedReader;
+import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.StringReader;
 import java.util.EnumSet;
@@ -30,6 +31,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.ParentNotDirectoryException;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.XAttr;
 import org.apache.hadoop.fs.XAttrSetFlag;
@@ -386,4 +388,50 @@ public void testXAttrMultiAddRemoveErrors() throws Exception {
         XAttrSetFlag.REPLACE));
     verifyXAttrsPresent(newXAttrs, 4);
   }
+
+  @Test
+  public void testVerifyParentDir() throws Exception {
+    hdfs.mkdirs(new Path("/dir1/dir2"));
+    hdfs.createNewFile(new Path("/dir1/file"));
+    hdfs.createNewFile(new Path("/dir1/dir2/file"));
+
+    INodesInPath iip = fsdir.resolvePath(null, "/");
+    fsdir.verifyParentDir(iip);
+
+    iip = fsdir.resolvePath(null, "/dir1");
+    fsdir.verifyParentDir(iip);
+
+    iip = fsdir.resolvePath(null, "/dir1/file");
+    fsdir.verifyParentDir(iip);
+
+    iip = fsdir.resolvePath(null, "/dir-nonexist/file");
+    try {
+      fsdir.verifyParentDir(iip);
+      fail("expected FNF");
+    } catch (FileNotFoundException fnf) {
+      // expected.
+    }
+
+    iip = fsdir.resolvePath(null, "/dir1/dir2");
+    fsdir.verifyParentDir(iip);
+
+    iip = fsdir.resolvePath(null, "/dir1/dir2/file");
+    fsdir.verifyParentDir(iip);
+
+    iip = fsdir.resolvePath(null, "/dir1/dir-nonexist/file");
+    try {
+      fsdir.verifyParentDir(iip);
+      fail("expected FNF");
+    } catch (FileNotFoundException fnf) {
+      // expected.
+    }
+
+    iip = fsdir.resolvePath(null, "/dir1/file/fail");
+    try {
+      fsdir.verifyParentDir(iip);
+      fail("expected FNF");
+    } catch (ParentNotDirectoryException pnd) {
+      // expected.
+    }
+  }
 }