HADOOP-12056. Use DirectoryStream in DiskChecker#checkDirs to detect errors when listing a directory. Contributed by Zhihai Xu.

Andrew Wang 2015-06-05 13:52:21 -07:00
parent 2dbc40e608
commit bc11e158b1
3 changed files with 45 additions and 4 deletions
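Background for the change: File#listFiles() returns null rather than throwing when a directory cannot be listed due to an I/O error, so the old loop in DiskChecker#checkDirs died with an uninformative NullPointerException instead of flagging the bad disk. Files.newDirectoryStream() surfaces the failure instead: it throws IOException if the directory cannot be opened, and errors hit during iteration arrive as DirectoryIteratorException wrapping the original IOException. A minimal standalone sketch of the contrast, not part of this patch (the directory path is made up):

    import java.io.File;
    import java.io.IOException;
    import java.nio.file.DirectoryIteratorException;
    import java.nio.file.DirectoryStream;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class ListingContrast {
      public static void main(String[] args) {
        File dir = new File("/tmp/possibly-unreadable");  // hypothetical path

        // Old style: listFiles() reports failure as null; the cause is lost.
        File[] children = dir.listFiles();
        if (children == null) {
          System.err.println("listFiles() returned null, cause unknown");
        }

        // New style: the underlying IOException is preserved.
        try (DirectoryStream<Path> stream =
            Files.newDirectoryStream(dir.toPath())) {
          for (Path entry : stream) {
            System.out.println(entry);
          }
        } catch (DirectoryIteratorException de) {
          // Thrown if the error occurs mid-iteration; getCause() is the IOException.
          System.err.println("iteration failed: " + de.getCause());
        } catch (IOException ie) {
          System.err.println("open failed: " + ie);
        }
      }
    }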

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -637,6 +637,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12059. S3Credentials should support use of CredentialProvider.
     (Sean Busbey via wang)
 
+    HADOOP-12056. Use DirectoryStream in DiskChecker#checkDirs to detect
+    errors when listing a directory. (Zhihai Xu via wang)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java

@@ -20,6 +20,9 @@ package org.apache.hadoop.util;
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.DirectoryIteratorException;
+import java.nio.file.Files;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -86,11 +89,24 @@ public class DiskChecker {
    */
   public static void checkDirs(File dir) throws DiskErrorException {
     checkDir(dir);
-    for (File child : dir.listFiles()) {
+    IOException ex = null;
+    try (DirectoryStream<java.nio.file.Path> stream =
+        Files.newDirectoryStream(dir.toPath())) {
+      for (java.nio.file.Path entry: stream) {
+        File child = entry.toFile();
         if (child.isDirectory()) {
           checkDirs(child);
         }
       }
+    } catch (DirectoryIteratorException de) {
+      ex = de.getCause();
+    } catch (IOException ie) {
+      ex = ie;
+    }
+    if (ex != null) {
+      throw new DiskErrorException("I/O error when open a directory: "
+          + dir.toString(), ex);
+    }
   }
 
   /**
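After this change, callers of checkDirs receive a DiskErrorException whose getCause() is the IOException raised while opening or iterating the directory; note how both catch arms funnel into the single ex variable so one exception is thrown after the try-with-resources block has closed the stream. A hedged usage sketch, assuming a made-up volume path (the wrapper class is illustrative, not part of the patch):

    import java.io.File;
    import org.apache.hadoop.util.DiskChecker;
    import org.apache.hadoop.util.DiskChecker.DiskErrorException;

    public class CheckDirsExample {
      public static void main(String[] args) {
        File volume = new File("/data/dfs/volume1");  // hypothetical volume directory
        try {
          // Recursively checks the directory tree; with this patch a listing
          // failure surfaces here instead of as a NullPointerException.
          DiskChecker.checkDirs(volume);
          System.out.println("volume is healthy");
        } catch (DiskErrorException e) {
          System.err.println("failed volume " + volume + ": " + e.getCause());
        }
      }
    }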

hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.Shell;
@@ -180,4 +181,25 @@ public class TestDiskChecker {
     System.out.println("checkDir success: " + success);
   }
 
+  @Test (timeout = 30000)
+  public void testCheckDirsIOException() throws Throwable {
+    Path path = new Path("target", TestDiskChecker.class.getSimpleName());
+    File localDir = new File(path.toUri().getRawPath());
+    localDir.mkdir();
+    File localFile = new File(localDir, "test");
+    localFile.createNewFile();
+    File spyLocalDir = spy(localDir);
+    doReturn(localFile.toPath()).when(spyLocalDir).toPath();
+    try {
+      DiskChecker.checkDirs(spyLocalDir);
+      fail("Expected exception for I/O error");
+    } catch (DiskErrorException e) {
+      GenericTestUtils.assertExceptionContains("I/O error", e);
+      assertTrue(e.getCause() instanceof IOException);
+    } finally {
+      localFile.delete();
+      localDir.delete();
+    }
+  }
 }
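The test induces the I/O failure deterministically, without touching OS permissions: it spies on the directory File and stubs toPath() to return the path of a regular file, so Files.newDirectoryStream() inside checkDirs throws java.nio.file.NotDirectoryException (an IOException subclass), which must then be wrapped in a DiskErrorException. A small standalone illustration of that mechanism, not part of the patch (the temp-file prefix is arbitrary):

    import java.io.IOException;
    import java.nio.file.Files;
    import java.nio.file.Path;

    public class NotDirectoryDemo {
      public static void main(String[] args) throws IOException {
        Path file = Files.createTempFile("diskchecker", ".tmp");
        try {
          // Opening a DirectoryStream over a regular file throws
          // NotDirectoryException, which extends IOException.
          Files.newDirectoryStream(file).close();
        } catch (IOException expected) {
          System.out.println("got " + expected.getClass().getSimpleName());
        } finally {
          Files.delete(file);
        }
      }
    }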