HDFS-4586. TestDataDirs.testGetDataDirsFromURIs fails with "all directories in dfs.datanode.data.dir are invalid". Contributed by Ivan Mitic.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1455708 13f79535-47bb-0310-9956-ffa450edef68
parent 6a9ccd809b
commit 813e97494a
CHANGES.txt
@@ -309,6 +309,9 @@ Trunk (Unreleased)
     HDFS-4391. TestDataTransferKeepalive fails when tests are executed in a
     certain order. (Andrew Wang via atm)
 
+    HDFS-4586. TestDataDirs.testGetDataDirsFromURIs fails with all directories
+    in dfs.datanode.data.dir are invalid. (Ivan Mitic via atm)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
 
     HDFS-4145. Merge hdfs cmd line scripts from branch-1-win. (David Lao,
DataNode.java
@@ -1617,6 +1617,21 @@ public class DataNode extends Configured
     }
   }
 
+  // Small wrapper around the DiskChecker class that provides means to mock
+  // DiskChecker static methods and unittest DataNode#getDataDirsFromURIs.
+  static class DataNodeDiskChecker {
+    private FsPermission expectedPermission;
+
+    public DataNodeDiskChecker(FsPermission expectedPermission) {
+      this.expectedPermission = expectedPermission;
+    }
+
+    public void checkDir(LocalFileSystem localFS, Path path)
+        throws DiskErrorException, IOException {
+      DiskChecker.checkDir(localFS, path, expectedPermission);
+    }
+  }
+
   /**
    * Make an instance of DataNode after ensuring that at least one of the
    * given data directories (and their parent directories, if necessary)
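The inner class above is the heart of the fix: it is a seam that hides the static DiskChecker.checkDir call behind an instance method, so a unit test can substitute a mock while production code keeps the original behavior. A minimal, Hadoop-independent sketch of the same pattern (all names below are hypothetical, not part of this commit):

    import java.io.File;
    import java.io.IOException;

    public class SeamDemo {
      // A static utility that is awkward to stub directly, standing in for
      // DiskChecker.checkDir.
      static void staticCheck(File dir) throws IOException {
        if (!dir.isDirectory()) throw new IOException("not a directory: " + dir);
      }

      // The seam: an instance wrapper that production code constructs once
      // and that tests replace with a mock or a fake.
      static class Checker {
        void check(File dir) throws IOException {
          staticCheck(dir); // delegate, preserving production behavior
        }
      }

      // Logic under test depends on the wrapper, not on the static method.
      static boolean isUsable(File dir, Checker checker) {
        try {
          checker.check(dir);
          return true;
        } catch (IOException ioe) {
          return false;
        }
      }

      public static void main(String[] args) {
        System.out.println(isUsable(new File("."), new Checker())); // true
      }
    }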
@@ -1635,7 +1650,10 @@ public class DataNode extends Configured
     FsPermission permission = new FsPermission(
         conf.get(DFS_DATANODE_DATA_DIR_PERMISSION_KEY,
                  DFS_DATANODE_DATA_DIR_PERMISSION_DEFAULT));
-    ArrayList<File> dirs = getDataDirsFromURIs(dataDirs, localFS, permission);
+    DataNodeDiskChecker dataNodeDiskChecker =
+        new DataNodeDiskChecker(permission);
+    ArrayList<File> dirs =
+        getDataDirsFromURIs(dataDirs, localFS, dataNodeDiskChecker);
     DefaultMetricsSystem.initialize("DataNode");
 
     assert dirs.size() > 0 : "number of data directories should be > 0";
@@ -1644,7 +1662,8 @@ public class DataNode extends Configured
 
   // DataNode ctor expects AbstractList instead of List or Collection...
   static ArrayList<File> getDataDirsFromURIs(Collection<URI> dataDirs,
-      LocalFileSystem localFS, FsPermission permission) throws IOException {
+      LocalFileSystem localFS, DataNodeDiskChecker dataNodeDiskChecker)
+          throws IOException {
     ArrayList<File> dirs = new ArrayList<File>();
     StringBuilder invalidDirs = new StringBuilder();
     for (URI dirURI : dataDirs) {
@@ -1656,7 +1675,7 @@ public class DataNode extends Configured
       // drop any (illegal) authority in the URI for backwards compatibility
       File dir = new File(dirURI.getPath());
      try {
-        DiskChecker.checkDir(localFS, new Path(dir.toURI()), permission);
+        dataNodeDiskChecker.checkDir(localFS, new Path(dir.toURI()));
         dirs.add(dir);
       } catch (IOException ioe) {
         LOG.warn("Invalid " + DFS_DATANODE_DATA_DIR_KEY + " "
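With this call-site swap the refactor is complete, and production behavior is unchanged: DataNodeDiskChecker simply delegates to DiskChecker.checkDir with the configured permission. The loop around the hunk follows a skip-and-collect idiom, logging and skipping each invalid directory and, as the JIRA title suggests, failing only when no directory survives. A compact sketch of that idiom under those assumptions, with hypothetical names:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    interface DirChecker {
      void checkDir(String dir) throws IOException;
    }

    public class SkipAndCollect {
      // Keep every directory that passes the check; remember the rest so the
      // caller can fail with one message if nothing was usable.
      static List<String> validDirs(List<String> candidates, DirChecker checker)
          throws IOException {
        List<String> dirs = new ArrayList<String>();
        StringBuilder invalid = new StringBuilder();
        for (String d : candidates) {
          try {
            checker.checkDir(d);
            dirs.add(d);
          } catch (IOException ioe) {
            invalid.append("\"").append(d).append("\" ");
          }
        }
        if (dirs.isEmpty()) {
          throw new IOException("All directories are invalid: " + invalid);
        }
        return dirs;
      }

      public static void main(String[] args) throws IOException {
        DirChecker checker = d -> {
          if (!d.equals("/p3")) throw new IOException("bad dir: " + d);
        };
        System.out.println(validDirs(Arrays.asList("/p1", "/p2", "/p3"), checker));
        // prints [/p3]
      }
    }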
TestDataDirs.java
@@ -27,33 +27,26 @@ import java.util.List;
 import org.junit.Test;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.*;
-import static org.apache.hadoop.test.MockitoMaker.*;
-
-import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker;
 
 public class TestDataDirs {
 
-  @Test public void testGetDataDirsFromURIs() throws Throwable {
-    File localDir = make(stub(File.class).returning(true).from.exists());
-    when(localDir.mkdir()).thenReturn(true);
-    FsPermission normalPerm = new FsPermission("700");
-    FsPermission badPerm = new FsPermission("000");
-    FileStatus stat = make(stub(FileStatus.class)
-        .returning(normalPerm, normalPerm, badPerm).from.getPermission());
-    when(stat.isDirectory()).thenReturn(true);
-    LocalFileSystem fs = make(stub(LocalFileSystem.class)
-        .returning(stat).from.getFileStatus(any(Path.class)));
-    when(fs.pathToFile(any(Path.class))).thenReturn(localDir);
+  @Test (timeout = 10000)
+  public void testGetDataDirsFromURIs() throws Throwable {
+
+    DataNodeDiskChecker diskChecker = mock(DataNodeDiskChecker.class);
+    doThrow(new IOException()).doThrow(new IOException()).doNothing()
+      .when(diskChecker).checkDir(any(LocalFileSystem.class), any(Path.class));
+    LocalFileSystem fs = mock(LocalFileSystem.class);
     Collection<URI> uris = Arrays.asList(new URI("file:/p1/"),
         new URI("file:/p2/"), new URI("file:/p3/"));
 
-    List<File> dirs = DataNode.getDataDirsFromURIs(uris, fs, normalPerm);
-
-    verify(fs, times(2)).setPermission(any(Path.class), eq(normalPerm));
-    verify(fs, times(6)).getFileStatus(any(Path.class));
-    assertEquals("number of valid data dirs", dirs.size(), 1);
+    List<File> dirs = DataNode.getDataDirsFromURIs(uris, fs, diskChecker);
+    assertEquals("number of valid data dirs", 1, dirs.size());
+    String validDir = dirs.iterator().next().getPath();
+    assertEquals("p3 should be valid", new File("/p3").getPath(), validDir);
   }
 }
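The rewritten test leans on Mockito's consecutive stubbing: chained doThrow/doNothing answers are consumed one per invocation, so the mocked checkDir throws for file:/p1/ and file:/p2/ and succeeds for file:/p3/, which is why exactly one directory survives and it is /p3. A self-contained illustration of that stubbing behavior (Probe and its method are hypothetical; only Mockito on the classpath is assumed):

    import static org.mockito.Mockito.*;

    import java.io.IOException;
    import java.util.Arrays;

    public class ConsecutiveStubbingDemo {
      // Hypothetical collaborator, used only to show the answer ordering.
      interface Probe {
        void touch(String name) throws IOException;
      }

      public static void main(String[] args) throws Exception {
        Probe probe = mock(Probe.class);
        // The first two calls throw; every later call succeeds.
        doThrow(new IOException()).doThrow(new IOException()).doNothing()
            .when(probe).touch(anyString());

        for (String name : Arrays.asList("p1", "p2", "p3")) {
          try {
            probe.touch(name);
            System.out.println(name + ": valid");   // only p3 reaches here
          } catch (IOException e) {
            System.out.println(name + ": invalid"); // p1 and p2
          }
        }
      }
    }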