HADOOP-13737. Cleanup DiskChecker interface. Contributed by Arpit Agarwal.

Anu Engineer 2016-10-20 13:26:23 -07:00
parent 5e83a21cb6
commit 262827cf75
2 changed files with 80 additions and 144 deletions

org/apache/hadoop/util/DiskChecker.java

@@ -20,9 +20,6 @@ package org.apache.hadoop.util;
 
 import java.io.File;
 import java.io.IOException;
-import java.nio.file.DirectoryStream;
-import java.nio.file.DirectoryIteratorException;
-import java.nio.file.Files;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -53,6 +50,69 @@ public class DiskChecker {
     }
   }
 
+  /**
+   * Create the directory if it doesn't exist and check that dir is readable,
+   * writable and executable
+   *
+   * @param dir
+   * @throws DiskErrorException
+   */
+  public static void checkDir(File dir) throws DiskErrorException {
+    if (!mkdirsWithExistsCheck(dir)) {
+      throw new DiskErrorException("Cannot create directory: "
+          + dir.toString());
+    }
+    checkAccessByFileMethods(dir);
+  }
+
+  /**
+   * Create the local directory if necessary, check permissions and also ensure
+   * it can be read from and written into.
+   *
+   * @param localFS local filesystem
+   * @param dir directory
+   * @param expected permission
+   * @throws DiskErrorException
+   * @throws IOException
+   */
+  public static void checkDir(LocalFileSystem localFS, Path dir,
+                              FsPermission expected)
+      throws DiskErrorException, IOException {
+    mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
+    checkAccessByFileMethods(localFS.pathToFile(dir));
+  }
+
+  /**
+   * Checks that the current running process can read, write, and execute the
+   * given directory by using methods of the File object.
+   *
+   * @param dir File to check
+   * @throws DiskErrorException if dir is not readable, not writable, or not
+   *                            executable
+   */
+  private static void checkAccessByFileMethods(File dir)
+      throws DiskErrorException {
+    if (!dir.isDirectory()) {
+      throw new DiskErrorException("Not a directory: "
+          + dir.toString());
+    }
+
+    if (!FileUtil.canRead(dir)) {
+      throw new DiskErrorException("Directory is not readable: "
+          + dir.toString());
+    }
+
+    if (!FileUtil.canWrite(dir)) {
+      throw new DiskErrorException("Directory is not writable: "
+          + dir.toString());
+    }
+
+    if (!FileUtil.canExecute(dir)) {
+      throw new DiskErrorException("Directory is not executable: "
+          + dir.toString());
+    }
+  }
+
   /**
    * The semantics of mkdirsWithExistsCheck method is different from the mkdirs
    * method provided in the Sun's java.io.File class in the following way:
@@ -66,11 +126,11 @@ public class DiskChecker {
    * @param dir
    * @return true on success, false on failure
    */
-  public static boolean mkdirsWithExistsCheck(File dir) {
+  private static boolean mkdirsWithExistsCheck(File dir) {
     if (dir.mkdir() || dir.exists()) {
       return true;
     }
-    File canonDir = null;
+    File canonDir;
     try {
       canonDir = dir.getCanonicalFile();
     } catch (IOException e) {
@@ -82,48 +142,6 @@ public class DiskChecker {
             (canonDir.mkdir() || canonDir.exists()));
   }
 
-  /**
-   * Recurse down a directory tree, checking all child directories.
-   * @param dir
-   * @throws DiskErrorException
-   */
-  public static void checkDirs(File dir) throws DiskErrorException {
-    checkDir(dir);
-    IOException ex = null;
-    try (DirectoryStream<java.nio.file.Path> stream =
-        Files.newDirectoryStream(dir.toPath())) {
-      for (java.nio.file.Path entry: stream) {
-        File child = entry.toFile();
-        if (child.isDirectory()) {
-          checkDirs(child);
-        }
-      }
-    } catch (DirectoryIteratorException de) {
-      ex = de.getCause();
-    } catch (IOException ie) {
-      ex = ie;
-    }
-    if (ex != null) {
-      throw new DiskErrorException("I/O error when open a directory: "
-          + dir.toString(), ex);
-    }
-  }
-
-  /**
-   * Create the directory if it doesn't exist and check that dir is readable,
-   * writable and executable
-   *
-   * @param dir
-   * @throws DiskErrorException
-   */
-  public static void checkDir(File dir) throws DiskErrorException {
-    if (!mkdirsWithExistsCheck(dir)) {
-      throw new DiskErrorException("Cannot create directory: "
-          + dir.toString());
-    }
-    checkDirAccess(dir);
-  }
-
   /**
    * Create the directory or check permissions if it already exists.
    *
@@ -143,7 +161,7 @@ public class DiskChecker {
    * @param expected expected permission
    * @throws IOException
    */
-  public static void mkdirsWithExistsAndPermissionCheck(
+  static void mkdirsWithExistsAndPermissionCheck(
       LocalFileSystem localFS, Path dir, FsPermission expected)
       throws IOException {
     File directory = localFS.pathToFile(dir);
@@ -155,64 +173,4 @@ public class DiskChecker {
     if (created || !localFS.getFileStatus(dir).getPermission().equals(expected))
       localFS.setPermission(dir, expected);
   }
-
-  /**
-   * Create the local directory if necessary, check permissions and also ensure
-   * it can be read from and written into.
-   *
-   * @param localFS local filesystem
-   * @param dir directory
-   * @param expected permission
-   * @throws DiskErrorException
-   * @throws IOException
-   */
-  public static void checkDir(LocalFileSystem localFS, Path dir,
-                              FsPermission expected)
-      throws DiskErrorException, IOException {
-    mkdirsWithExistsAndPermissionCheck(localFS, dir, expected);
-    checkDirAccess(localFS.pathToFile(dir));
-  }
-
-  /**
-   * Checks that the given file is a directory and that the current running
-   * process can read, write, and execute it.
-   *
-   * @param dir File to check
-   * @throws DiskErrorException if dir is not a directory, not readable, not
-   *                            writable, or not executable
-   */
-  private static void checkDirAccess(File dir) throws DiskErrorException {
-    if (!dir.isDirectory()) {
-      throw new DiskErrorException("Not a directory: "
-          + dir.toString());
-    }
-
-    checkAccessByFileMethods(dir);
-  }
-
-  /**
-   * Checks that the current running process can read, write, and execute the
-   * given directory by using methods of the File object.
-   *
-   * @param dir File to check
-   * @throws DiskErrorException if dir is not readable, not writable, or not
-   *                            executable
-   */
-  private static void checkAccessByFileMethods(File dir)
-      throws DiskErrorException {
-    if (!FileUtil.canRead(dir)) {
-      throw new DiskErrorException("Directory is not readable: "
-          + dir.toString());
-    }
-
-    if (!FileUtil.canWrite(dir)) {
-      throw new DiskErrorException("Directory is not writable: "
-          + dir.toString());
-    }
-
-    if (!FileUtil.canExecute(dir)) {
-      throw new DiskErrorException("Directory is not executable: "
-          + dir.toString());
-    }
-  }
 }
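
Note: after this cleanup the public surface of DiskChecker is the pair of checkDir overloads shown above; mkdirsWithExistsCheck becomes private, mkdirsWithExistsAndPermissionCheck becomes package-private, and the recursive checkDirs is removed. A minimal usage sketch of the two remaining overloads (the paths, permission string, and class name below are made up for illustration, not part of the commit):

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

// Hypothetical caller, not part of the commit.
public class DiskCheckerUsage {
  public static void main(String[] args) throws IOException {
    // File variant: creates the directory if needed, then verifies it is a
    // readable, writable, executable directory.
    try {
      DiskChecker.checkDir(new File("/tmp/disk-checker-demo"));
    } catch (DiskErrorException e) {
      System.err.println("checkDir(File) failed: " + e.getMessage());
    }

    // LocalFileSystem variant: additionally enforces the expected permission
    // on the directory before running the same access checks.
    LocalFileSystem localFS = FileSystem.getLocal(new Configuration());
    try {
      DiskChecker.checkDir(localFS, new Path("/tmp/disk-checker-demo-fs"),
          new FsPermission("755"));
    } catch (DiskErrorException e) {
      System.err.println("checkDir(LocalFileSystem) failed: " + e.getMessage());
    }
  }
}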

org/apache/hadoop/util/TestDiskChecker.java

@@ -32,7 +32,6 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 
 public class TestDiskChecker {
@@ -192,25 +191,4 @@ public class TestDiskChecker {
     System.out.println("checkDir success: " + success);
   }
 
-  @Test (timeout = 30000)
-  public void testCheckDirsIOException() throws Throwable {
-    Path path = new Path("target", TestDiskChecker.class.getSimpleName());
-    File localDir = new File(path.toUri().getRawPath());
-    localDir.mkdir();
-    File localFile = new File(localDir, "test");
-    localFile.createNewFile();
-    File spyLocalDir = spy(localDir);
-    doReturn(localFile.toPath()).when(spyLocalDir).toPath();
-    try {
-      DiskChecker.checkDirs(spyLocalDir);
-      fail("Expected exception for I/O error");
-    } catch (DiskErrorException e) {
-      GenericTestUtils.assertExceptionContains("I/O error", e);
-      assertTrue(e.getCause() instanceof IOException);
-    } finally {
-      localFile.delete();
-      localDir.delete();
-    }
-  }
-
 }
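
Note: checkDirs(File) is deleted along with its only test, so any caller that still needs the recursive check has to supply it itself. A rough sketch assembled from the removed method body and the remaining public checkDir (the helper class name is hypothetical):

import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryIteratorException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;

import org.apache.hadoop.util.DiskChecker;
import org.apache.hadoop.util.DiskChecker.DiskErrorException;

// Hypothetical replacement for the removed DiskChecker.checkDirs(File).
public final class RecursiveDiskCheck {
  public static void checkDirs(File dir) throws DiskErrorException {
    // Check this directory, then recurse into each child directory,
    // mirroring what DiskChecker.checkDirs used to do.
    DiskChecker.checkDir(dir);
    try (DirectoryStream<java.nio.file.Path> stream =
             Files.newDirectoryStream(dir.toPath())) {
      for (java.nio.file.Path entry : stream) {
        File child = entry.toFile();
        if (child.isDirectory()) {
          checkDirs(child);
        }
      }
    } catch (DirectoryIteratorException de) {
      throw new DiskErrorException("I/O error when opening a directory: "
          + dir, de.getCause());
    } catch (IOException ie) {
      throw new DiskErrorException("I/O error when opening a directory: "
          + dir, ie);
    }
  }
}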