diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index d915723fd4b..025c761688f 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -147,6 +147,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12059. S3Credentials should support use of CredentialProvider.
     (Sean Busbey via wang)
 
+    HADOOP-12056. Use DirectoryStream in DiskChecker#checkDirs to detect
+    errors when listing a directory. (Zhihai Xu via wang)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
index 6b27ae5397d..a36a7a0f70e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.util;
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.DirectoryIteratorException;
+import java.nio.file.Files;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -86,13 +89,26 @@ public class DiskChecker {
    */
   public static void checkDirs(File dir) throws DiskErrorException {
     checkDir(dir);
-    for (File child : dir.listFiles()) {
-      if (child.isDirectory()) {
-        checkDirs(child);
+    IOException ex = null;
+    try (DirectoryStream<java.nio.file.Path> stream =
+        Files.newDirectoryStream(dir.toPath())) {
+      for (java.nio.file.Path entry: stream) {
+        File child = entry.toFile();
+        if (child.isDirectory()) {
+          checkDirs(child);
+        }
       }
+    } catch (DirectoryIteratorException de) {
+      ex = de.getCause();
+    } catch (IOException ie) {
+      ex = ie;
+    }
+    if (ex != null) {
+      throw new DiskErrorException("I/O error when open a directory: "
+          + dir.toString(), ex);
     }
   }
-  
+
   /**
    * Create the directory if it doesn't exist and check that dir is readable,
    * writable and executable
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
index 5ab1313d5ea..de547351175 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -32,6 +32,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.Shell;
 
@@ -180,4 +181,25 @@ public class TestDiskChecker {
 
     System.out.println("checkDir success: " + success);
   }
+
+  @Test (timeout = 30000)
+  public void testCheckDirsIOException() throws Throwable {
+    Path path = new Path("target", TestDiskChecker.class.getSimpleName());
+    File localDir = new File(path.toUri().getRawPath());
+    localDir.mkdir();
+    File localFile = new File(localDir, "test");
+    localFile.createNewFile();
+    File spyLocalDir = spy(localDir);
+    doReturn(localFile.toPath()).when(spyLocalDir).toPath();
+    try {
+      DiskChecker.checkDirs(spyLocalDir);
+      fail("Expected exception for I/O error");
+    } catch (DiskErrorException e) {
+      GenericTestUtils.assertExceptionContains("I/O error", e);
+      assertTrue(e.getCause() instanceof IOException);
+    } finally {
+      localFile.delete();
+      localDir.delete();
+    }
+  }
 }
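
Note (illustration, not part of the patch): the motivation for the change is that File#listFiles() signals a listing failure by returning null rather than throwing, so the old loop could only fail with an uninformative NullPointerException; Files.newDirectoryStream() instead throws an IOException carrying the underlying cause, which checkDirs now wraps in a DiskErrorException. A minimal standalone sketch of that difference follows; the /tmp/unreadable-dir path is hypothetical.

import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

public class ListingErrorDemo {
  public static void main(String[] args) {
    // Hypothetical directory whose listing fails (e.g. read permission revoked).
    File dir = new File("/tmp/unreadable-dir");

    // Old behavior: listFiles() reports failure by returning null, with no
    // indication of why the listing failed.
    File[] children = dir.listFiles();
    System.out.println("listFiles() returned: " + children); // prints "null" on failure

    // New behavior: newDirectoryStream() throws an IOException that carries
    // the underlying error, so callers like DiskChecker#checkDirs can report it.
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir.toPath())) {
      for (Path entry : stream) {
        System.out.println(entry);
      }
    } catch (IOException e) {
      System.out.println("Listing failed with a diagnosable cause: " + e);
    }
  }
}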