HADOOP-8973. DiskChecker cannot reliably detect an inaccessible disk on Windows with NTFS ACLs. Contributed by Chris Nauroth.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1454889 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2013-03-10 18:17:41 +00:00
parent c1fabc5e91
commit f5227eb51c
2 changed files with 122 additions and 56 deletions
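The heart of the problem: on Windows, NTFS ACLs can deny access that the JVM's File.canRead(), File.canWrite(), and File.canExecute() still report as granted (see the Sun bug linked in the new javadoc below), so DiskChecker could approve a directory the process cannot actually use. A minimal standalone sketch of the probe-by-interaction idea the patch adopts; the class and method names here are illustrative, not part of the patch:

import java.io.File;
import java.io.IOException;

// Illustrative sketch, not the patch itself: verify directory access by
// interacting with the file system instead of trusting File.canRead() and
// File.canWrite(), which can return wrong answers under NTFS ACLs.
public class DirProbe {

  public static boolean isUsable(File dir) {
    // Readable: File.list() returns null when the directory cannot be read.
    if (dir.list() == null) {
      return false;
    }
    // Writable: actually creating (and deleting) a temp file proves it.
    try {
      File probe = File.createTempFile("probe", null, dir);
      return probe.delete();
    } catch (IOException e) {
      return false;
    }
  }

  public static void main(String[] args) {
    System.out.println(isUsable(new File(args[0])));
  }
}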

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java

@@ -23,11 +23,10 @@
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.util.Shell;
 
 /**
  * Class that provides utility functions for checking disk problem
@@ -36,10 +35,16 @@
 @InterfaceStability.Unstable
 public class DiskChecker {
 
+  private static final long SHELL_TIMEOUT = 10 * 1000;
+
   public static class DiskErrorException extends IOException {
     public DiskErrorException(String msg) {
       super(msg);
     }
+
+    public DiskErrorException(String msg, Throwable cause) {
+      super(msg, cause);
+    }
   }
 
   public static class DiskOutOfSpaceException extends IOException {
@@ -85,25 +90,11 @@ public static boolean mkdirsWithExistsCheck(File dir) {
    * @throws DiskErrorException
    */
   public static void checkDir(File dir) throws DiskErrorException {
-    if (!mkdirsWithExistsCheck(dir))
+    if (!mkdirsWithExistsCheck(dir)) {
       throw new DiskErrorException("Can not create directory: "
                                    + dir.toString());
-
-    if (!dir.isDirectory())
-      throw new DiskErrorException("Not a directory: "
-                                   + dir.toString());
-
-    if (!dir.canRead())
-      throw new DiskErrorException("Directory is not readable: "
-                                   + dir.toString());
-
-    if (!dir.canWrite())
-      throw new DiskErrorException("Directory is not writable: "
-                                   + dir.toString());
-
-    if (!dir.canExecute())
-      throw new DiskErrorException("Directory is not executable: "
-                                   + dir.toString());
+    }
+    checkDirAccess(dir);
   }
 
   /**
@@ -152,24 +143,102 @@ public static void checkDir(LocalFileSystem localFS, Path dir,
                               FsPermission expected)
       throws DiskErrorException, IOException {
     mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
+    checkDirAccess(localFS.pathToFile(dir));
+  }
 
-    FileStatus stat = localFS.getFileStatus(dir);
-    FsPermission actual = stat.getPermission();
-
-    if (!stat.isDirectory())
-      throw new DiskErrorException("not a directory: "+ dir.toString());
-
-    FsAction user = actual.getUserAction();
-    if (!user.implies(FsAction.READ))
-      throw new DiskErrorException("directory is not readable: "
+  /**
+   * Checks that the given file is a directory and that the current running
+   * process can read, write, and execute it.
+   *
+   * @param dir File to check
+   * @throws DiskErrorException if dir is not a directory, not readable, not
+   *                            writable, or not executable
+   */
+  private static void checkDirAccess(File dir) throws DiskErrorException {
+    if (!dir.isDirectory()) {
+      throw new DiskErrorException("Not a directory: "
                                    + dir.toString());
+    }
 
-    if (!user.implies(FsAction.WRITE))
-      throw new DiskErrorException("directory is not writable: "
-                                   + dir.toString());
+    if (Shell.WINDOWS) {
+      checkAccessByFileSystemInteraction(dir);
+    } else {
+      checkAccessByFileMethods(dir);
+    }
+  }
 
-    if (!user.implies(FsAction.EXECUTE))
-      throw new DiskErrorException("directory is not listable: "
+  /**
+   * Checks that the current running process can read, write, and execute the
+   * given directory by using methods of the File object.
+   *
+   * @param dir File to check
+   * @throws DiskErrorException if dir is not readable, not writable, or not
+   *                            executable
+   */
+  private static void checkAccessByFileMethods(File dir)
+      throws DiskErrorException {
+    if (!dir.canRead()) {
+      throw new DiskErrorException("Directory is not readable: "
                                    + dir.toString());
+    }
+
+    if (!dir.canWrite()) {
+      throw new DiskErrorException("Directory is not writable: "
+                                   + dir.toString());
+    }
+
+    if (!dir.canExecute()) {
+      throw new DiskErrorException("Directory is not executable: "
+                                   + dir.toString());
+    }
+  }
+
+  /**
+   * Checks that the current running process can read, write, and execute the
+   * given directory by attempting each of those operations on the file system.
+   * This method contains several workarounds to known JVM bugs that cause
+   * File.canRead, File.canWrite, and File.canExecute to return incorrect results
+   * on Windows with NTFS ACLs.  See:
+   * http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=6203387
+   * These bugs are supposed to be fixed in JDK7.
+   *
+   * @param dir File to check
+   * @throws DiskErrorException if dir is not readable, not writable, or not
+   *                            executable
+   */
+  private static void checkAccessByFileSystemInteraction(File dir)
+      throws DiskErrorException {
+    // Make sure we can read the directory by listing it.
+    if (dir.list() == null) {
+      throw new DiskErrorException("Directory is not readable: "
+                                   + dir.toString());
+    }
+
+    // Make sure we can write to the directory by creating a temp file in it.
+    try {
+      File tempFile = File.createTempFile("checkDirAccess", null, dir);
+      if (!tempFile.delete()) {
+        throw new DiskErrorException("Directory is not writable: "
+                                     + dir.toString());
+      }
+    } catch (IOException e) {
+      throw new DiskErrorException("Directory is not writable: "
+                                   + dir.toString(), e);
+    }
+
+    // Make sure the directory is executable by trying to cd into it.  This
+    // launches a separate process.  It does not change the working directory of
+    // the current process.
+    try {
+      String[] cdCmd = new String[] { "cmd", "/C", "cd",
+          dir.getAbsolutePath() };
+      Shell.execCommand(null, cdCmd, SHELL_TIMEOUT);
+    } catch (Shell.ExitCodeException e) {
+      throw new DiskErrorException("Directory is not executable: "
+                                   + dir.toString(), e);
+    } catch (IOException e) {
+      throw new DiskErrorException("Directory is not executable: "
+                                   + dir.toString(), e);
+    }
+  }
 }
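For context, a caller validating a storage directory would use the rewritten entry point like this; the path and surrounding class are made up, but DiskChecker.checkDir(File) and DiskErrorException are the API shown in the diff above:

import java.io.File;
import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

// Hypothetical caller: the storage path is an example, not Hadoop code.
public class CheckLocalDir {
  public static void main(String[] args) {
    File dir = new File("/data/1/dfs/dn");
    try {
      // Creates the directory if necessary, then delegates to
      // checkDirAccess() to verify read, write, and execute access.
      DiskChecker.checkDir(dir);
      System.out.println("Directory is usable: " + dir);
    } catch (DiskErrorException e) {
      System.err.println("Disk check failed: " + e.getMessage());
    }
  }
}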

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java

@@ -25,10 +25,13 @@
 import static org.mockito.Mockito.*;
 import static org.apache.hadoop.test.MockitoMaker.*;
 
-import org.apache.hadoop.fs.permission.FsPermission;
-import static org.apache.hadoop.fs.permission.FsAction.*;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.Shell;
@@ -110,29 +113,21 @@ public void testCheckDir_notListable() throws Throwable {
   private void _checkDirs(boolean isDir, FsPermission perm, boolean success)
       throws Throwable {
-    File localDir = make(stub(File.class).returning(true).from.exists());
-    when(localDir.mkdir()).thenReturn(true);
-    Path dir = mock(Path.class);
-    LocalFileSystem fs = make(stub(LocalFileSystem.class)
-        .returning(localDir).from.pathToFile(dir));
-    FileStatus stat = make(stub(FileStatus.class)
-        .returning(perm).from.getPermission());
-    when(stat.isDirectory()).thenReturn(isDir);
-    when(fs.getFileStatus(dir)).thenReturn(stat);
+    File localDir = File.createTempFile("test", "tmp");
+    if (isDir) {
+      localDir.delete();
+      localDir.mkdir();
+    }
+    Shell.execCommand(Shell.getSetPermissionCommand(String.format("%04o",
+      perm.toShort()), false, localDir.getAbsolutePath()));
     try {
-      DiskChecker.checkDir(fs, dir, perm);
-      verify(stat).isDirectory();
-      verify(fs, times(2)).getFileStatus(dir);
-      verify(stat, times(2)).getPermission();
+      DiskChecker.checkDir(FileSystem.getLocal(new Configuration()),
+          new Path(localDir.getAbsolutePath()), perm);
       assertTrue("checkDir success", success);
-    }
-    catch (DiskErrorException e) {
+    } catch (DiskErrorException e) {
       assertFalse("checkDir success", success);
       e.printStackTrace();
     }
-    System.out.println("checkDir success: "+ success);
+    localDir.delete();
   }
 
   /**
@@ -168,8 +163,10 @@ public void testCheckDir_notListable_local() throws Throwable {
   private void _checkDirs(boolean isDir, String perm, boolean success)
       throws Throwable {
     File localDir = File.createTempFile("test", "tmp");
-    localDir.delete();
-    localDir.mkdir();
+    if (isDir) {
+      localDir.delete();
+      localDir.mkdir();
+    }
     Shell.execCommand(Shell.getSetPermissionCommand(perm, false,
         localDir.getAbsolutePath()));
     try {
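A sketch of how the rewritten local-filesystem helper is driven from inside TestDiskChecker; the hunk headers above confirm test methods such as testCheckDir_notListable_local exist, but the permission strings below are assumed values, not copied from the real tests:

// Assumed invocations of _checkDirs(boolean isDir, String perm, boolean success);
// the octal permission values are illustrative.
@Test
public void testCheckDir_normal_local() throws Throwable {
  _checkDirs(true, "755", true);   // owner has rwx, so checkDir passes
}

@Test
public void testCheckDir_notWritable_local() throws Throwable {
  _checkDirs(true, "555", false);  // write bit cleared, so checkDir must fail
}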