HDFS-4586. Merge r1455708 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1490495 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2013-06-07 02:08:44 +00:00
parent f07af53a34
commit 2910b06dfa
3 changed files with 37 additions and 22 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -348,6 +348,9 @@ Release 2.1.0-beta - UNRELEASED
     HADOOP-8957 HDFS tests for AbstractFileSystem#IsValidName should be overridden for
     embedded file systems like ViewFs (Chris Nauroth via Sanjay Radia)
 
+    HDFS-4586. TestDataDirs.testGetDataDirsFromURIs fails with all directories
+    in dfs.datanode.data.dir are invalid. (Ivan Mitic via atm)
+
   BREAKDOWN OF HDFS-347 SUBTASKS AND RELATED JIRAS
 
   HDFS-4353. Encapsulate connections to peers in Peer and PeerServer classes.

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java

@@ -1755,6 +1755,21 @@ public class DataNode extends Configured
     }
   }
 
+  // Small wrapper around the DiskChecker class that provides means to mock
+  // DiskChecker static methods and unittest DataNode#getDataDirsFromURIs.
+  static class DataNodeDiskChecker {
+    private FsPermission expectedPermission;
+
+    public DataNodeDiskChecker(FsPermission expectedPermission) {
+      this.expectedPermission = expectedPermission;
+    }
+
+    public void checkDir(LocalFileSystem localFS, Path path)
+        throws DiskErrorException, IOException {
+      DiskChecker.checkDir(localFS, path, expectedPermission);
+    }
+  }
+
   /**
    * Make an instance of DataNode after ensuring that at least one of the
    * given data directories (and their parent directories, if necessary)
@@ -1773,7 +1788,10 @@ public class DataNode extends Configured
     FsPermission permission = new FsPermission(
         conf.get(DFS_DATANODE_DATA_DIR_PERMISSION_KEY,
                  DFS_DATANODE_DATA_DIR_PERMISSION_DEFAULT));
-    ArrayList<File> dirs = getDataDirsFromURIs(dataDirs, localFS, permission);
+    DataNodeDiskChecker dataNodeDiskChecker =
+        new DataNodeDiskChecker(permission);
+    ArrayList<File> dirs =
+        getDataDirsFromURIs(dataDirs, localFS, dataNodeDiskChecker);
 
     DefaultMetricsSystem.initialize("DataNode");
     assert dirs.size() > 0 : "number of data directories should be > 0";
@@ -1782,7 +1800,8 @@ public class DataNode extends Configured
 
   // DataNode ctor expects AbstractList instead of List or Collection...
   static ArrayList<File> getDataDirsFromURIs(Collection<URI> dataDirs,
-      LocalFileSystem localFS, FsPermission permission) throws IOException {
+      LocalFileSystem localFS, DataNodeDiskChecker dataNodeDiskChecker)
+      throws IOException {
     ArrayList<File> dirs = new ArrayList<File>();
     StringBuilder invalidDirs = new StringBuilder();
     for (URI dirURI : dataDirs) {
@@ -1794,7 +1813,7 @@ public class DataNode extends Configured
       // drop any (illegal) authority in the URI for backwards compatibility
       File dir = new File(dirURI.getPath());
       try {
-        DiskChecker.checkDir(localFS, new Path(dir.toURI()), permission);
+        dataNodeDiskChecker.checkDir(localFS, new Path(dir.toURI()));
         dirs.add(dir);
       } catch (IOException ioe) {
         LOG.warn("Invalid " + DFS_DATANODE_DATA_DIR_KEY + " "

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java

@@ -27,33 +27,26 @@ import java.util.List;
 
 import org.junit.Test;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.*;
-import static org.apache.hadoop.test.MockitoMaker.*;
 
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker;
 
 public class TestDataDirs {
 
-  @Test public void testGetDataDirsFromURIs() throws Throwable {
-    File localDir = make(stub(File.class).returning(true).from.exists());
-    when(localDir.mkdir()).thenReturn(true);
-    FsPermission normalPerm = new FsPermission("700");
-    FsPermission badPerm = new FsPermission("000");
-    FileStatus stat = make(stub(FileStatus.class)
-        .returning(normalPerm, normalPerm, badPerm).from.getPermission());
-    when(stat.isDirectory()).thenReturn(true);
-    LocalFileSystem fs = make(stub(LocalFileSystem.class)
-        .returning(stat).from.getFileStatus(any(Path.class)));
-    when(fs.pathToFile(any(Path.class))).thenReturn(localDir);
+  @Test (timeout = 10000)
+  public void testGetDataDirsFromURIs() throws Throwable {
+    DataNodeDiskChecker diskChecker = mock(DataNodeDiskChecker.class);
+    doThrow(new IOException()).doThrow(new IOException()).doNothing()
+      .when(diskChecker).checkDir(any(LocalFileSystem.class), any(Path.class));
+    LocalFileSystem fs = mock(LocalFileSystem.class);
     Collection<URI> uris = Arrays.asList(new URI("file:/p1/"),
         new URI("file:/p2/"), new URI("file:/p3/"));
-    List<File> dirs = DataNode.getDataDirsFromURIs(uris, fs, normalPerm);
-    verify(fs, times(2)).setPermission(any(Path.class), eq(normalPerm));
-    verify(fs, times(6)).getFileStatus(any(Path.class));
-    assertEquals("number of valid data dirs", dirs.size(), 1);
+    List<File> dirs = DataNode.getDataDirsFromURIs(uris, fs, diskChecker);
+    assertEquals("number of valid data dirs", 1, dirs.size());
+    String validDir = dirs.iterator().next().getPath();
+    assertEquals("p3 should be valid", new File("/p3").getPath(), validDir);
   }
 }
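The rewritten test relies on Mockito's consecutive stubbing: chained doThrow()/doNothing() answers are consumed one per invocation, so checkDir() throws for file:/p1/ and file:/p2/ and succeeds for file:/p3/, leaving exactly one valid directory. A self-contained sketch of that idiom under JUnit 4, with a hypothetical Probe interface standing in for DataNodeDiskChecker:

import static org.junit.Assert.assertEquals;
import static org.mockito.Mockito.*;

import java.io.IOException;
import org.junit.Test;

public class ConsecutiveStubbingSketch {
  // Hypothetical collaborator, standing in for DataNodeDiskChecker.
  interface Probe {
    void check(String dir) throws IOException;
  }

  @Test
  public void answersAreConsumedInOrder() throws Exception {
    Probe probe = mock(Probe.class);
    // First two calls throw; every later call succeeds.
    doThrow(new IOException()).doThrow(new IOException()).doNothing()
        .when(probe).check(anyString());

    int valid = 0;
    for (String d : new String[] { "/p1", "/p2", "/p3" }) {
      try {
        probe.check(d);
        valid++;
      } catch (IOException e) {
        // counted as invalid, mirroring getDataDirsFromURIs
      }
    }
    assertEquals(1, valid);
  }
}

Because the last stubbed answer repeats for any further invocations, the three chained answers map one-to-one onto the three URIs in the real test, and the final assertion can pin down /p3 as the single surviving data dir.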