From 813e97494aff39bf8b72f9a32c5a157345f7543f Mon Sep 17 00:00:00 2001
From: Aaron Myers
Date: Tue, 12 Mar 2013 20:55:33 +0000
Subject: [PATCH] HDFS-4586. TestDataDirs.testGetDataDirsFromURIs fails with
 all directories in dfs.datanode.data.dir are invalid. Contributed by Ivan
 Mitic.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1455708 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt       |  3 ++
 .../hadoop/hdfs/server/datanode/DataNode.java     | 25 +++++++++++++--
 .../hdfs/server/datanode/TestDataDirs.java        | 31 +++++++------------
 3 files changed, 37 insertions(+), 22 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 3760157078b..2c3910cc8c0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -309,6 +309,9 @@ Trunk (Unreleased)
     HDFS-4391. TestDataTransferKeepalive fails when tests are executed in a
     certain order. (Andrew Wang via atm)
 
+    HDFS-4586. TestDataDirs.testGetDataDirsFromURIs fails with all directories
+    in dfs.datanode.data.dir are invalid. (Ivan Mitic via atm)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
 
     HDFS-4145. Merge hdfs cmd line scripts from branch-1-win. (David Lao,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 375c6954a7f..10a34f43be7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -1617,6 +1617,21 @@ public class DataNode extends Configured
     }
   }
 
+  // Small wrapper around the DiskChecker class that provides means to mock
+  // DiskChecker static methods and unittest DataNode#getDataDirsFromURIs.
+  static class DataNodeDiskChecker {
+    private FsPermission expectedPermission;
+
+    public DataNodeDiskChecker(FsPermission expectedPermission) {
+      this.expectedPermission = expectedPermission;
+    }
+
+    public void checkDir(LocalFileSystem localFS, Path path)
+        throws DiskErrorException, IOException {
+      DiskChecker.checkDir(localFS, path, expectedPermission);
+    }
+  }
+
   /**
    * Make an instance of DataNode after ensuring that at least one of the
    * given data directories (and their parent directories, if necessary)
@@ -1635,7 +1650,10 @@ public class DataNode extends Configured
     FsPermission permission = new FsPermission(
         conf.get(DFS_DATANODE_DATA_DIR_PERMISSION_KEY,
                  DFS_DATANODE_DATA_DIR_PERMISSION_DEFAULT));
-    ArrayList<File> dirs = getDataDirsFromURIs(dataDirs, localFS, permission);
+    DataNodeDiskChecker dataNodeDiskChecker =
+        new DataNodeDiskChecker(permission);
+    ArrayList<File> dirs =
+        getDataDirsFromURIs(dataDirs, localFS, dataNodeDiskChecker);
     DefaultMetricsSystem.initialize("DataNode");
 
     assert dirs.size() > 0 : "number of data directories should be > 0";
@@ -1644,7 +1662,8 @@ public class DataNode extends Configured
 
   // DataNode ctor expects AbstractList instead of List or Collection...
   static ArrayList<File> getDataDirsFromURIs(Collection<URI> dataDirs,
-      LocalFileSystem localFS, FsPermission permission) throws IOException {
+      LocalFileSystem localFS, DataNodeDiskChecker dataNodeDiskChecker)
+      throws IOException {
     ArrayList<File> dirs = new ArrayList<File>();
     StringBuilder invalidDirs = new StringBuilder();
     for (URI dirURI : dataDirs) {
@@ -1656,7 +1675,7 @@ public class DataNode extends Configured
       // drop any (illegal) authority in the URI for backwards compatibility
       File dir = new File(dirURI.getPath());
       try {
-        DiskChecker.checkDir(localFS, new Path(dir.toURI()), permission);
+        dataNodeDiskChecker.checkDir(localFS, new Path(dir.toURI()));
         dirs.add(dir);
       } catch (IOException ioe) {
         LOG.warn("Invalid " + DFS_DATANODE_DATA_DIR_KEY + " "
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
index 9226beaf297..fc7a3ff01d1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java
@@ -27,33 +27,26 @@ import java.util.List;
 import org.junit.Test;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.*;
-import static org.apache.hadoop.test.MockitoMaker.*;
 
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker;
 
 public class TestDataDirs {
 
-  @Test public void testGetDataDirsFromURIs() throws Throwable {
-    File localDir = make(stub(File.class).returning(true).from.exists());
-    when(localDir.mkdir()).thenReturn(true);
-    FsPermission normalPerm = new FsPermission("700");
-    FsPermission badPerm = new FsPermission("000");
-    FileStatus stat = make(stub(FileStatus.class)
-        .returning(normalPerm, normalPerm, badPerm).from.getPermission());
-    when(stat.isDirectory()).thenReturn(true);
-    LocalFileSystem fs = make(stub(LocalFileSystem.class)
-        .returning(stat).from.getFileStatus(any(Path.class)));
-    when(fs.pathToFile(any(Path.class))).thenReturn(localDir);
+  @Test (timeout = 10000)
+  public void testGetDataDirsFromURIs() throws Throwable {
+
+    DataNodeDiskChecker diskChecker = mock(DataNodeDiskChecker.class);
+    doThrow(new IOException()).doThrow(new IOException()).doNothing()
+      .when(diskChecker).checkDir(any(LocalFileSystem.class), any(Path.class));
+    LocalFileSystem fs = mock(LocalFileSystem.class);
 
     Collection<URI> uris = Arrays.asList(new URI("file:/p1/"), new URI("file:/p2/"),
         new URI("file:/p3/"));
-    List<File> dirs = DataNode.getDataDirsFromURIs(uris, fs, normalPerm);
-
-    verify(fs, times(2)).setPermission(any(Path.class), eq(normalPerm));
-    verify(fs, times(6)).getFileStatus(any(Path.class));
-    assertEquals("number of valid data dirs", dirs.size(), 1);
+    List<File> dirs = DataNode.getDataDirsFromURIs(uris, fs, diskChecker);
+    assertEquals("number of valid data dirs", 1, dirs.size());
+    String validDir = dirs.iterator().next().getPath();
+    assertEquals("p3 should be valid", new File("/p3").getPath(), validDir);
   }
 }
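
Note on the test pattern: the rewritten test relies on Mockito's consecutive stubbing, where a chained doThrow().doThrow().doNothing() makes the first two calls to the stubbed checkDir fail and the third succeed, so exactly one data directory ("/p3") survives. Below is a minimal, self-contained sketch of that pattern; it is not part of the patch, and the ConsecutiveStubbingSketch class, the Checker stand-in, and the directory strings are purely illustrative.

import static org.mockito.Mockito.*;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;

public class ConsecutiveStubbingSketch {

  // Hypothetical stand-in for DataNode.DataNodeDiskChecker.
  static class Checker {
    public void check(String dir) throws IOException {
      // a real implementation would validate the directory here
    }
  }

  public static void main(String[] args) throws Exception {
    Checker checker = mock(Checker.class);

    // Consecutive stubbing: the first two calls throw, the third does nothing,
    // mirroring the doThrow().doThrow().doNothing() chain in the new test.
    doThrow(new IOException("bad dir"))
        .doThrow(new IOException("bad dir"))
        .doNothing()
        .when(checker).check(anyString());

    List<String> valid = new ArrayList<String>();
    for (String dir : Arrays.asList("/p1", "/p2", "/p3")) {
      try {
        checker.check(dir);
        valid.add(dir);        // kept, like dirs.add(dir) in getDataDirsFromURIs
      } catch (IOException e) {
        // skipped, like the LOG.warn branch in getDataDirsFromURIs
      }
    }
    System.out.println(valid); // prints [/p3]
  }
}

Run with mockito-core on the classpath; the loop mirrors how getDataDirsFromURIs drops directories whose check throws and keeps the rest, which is why the test asserts that only /p3 remains valid.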