HDFS-4470. Several HDFS tests attempt file operations on invalid HDFS paths when running on Windows. Contributed by Chris Nauroth.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1443744 13f79535-47bb-0310-9956-ffa450edef68
parent c3d09010c7
commit 7114a61318
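For context, the failing tests built HDFS paths from the local build directory (test.build.data or MiniDFSCluster.getBaseDirectory()). On Windows that directory is an absolute local path with a drive letter and backslashes, which is not a valid HDFS path, so file operations against it fail. A minimal before/after sketch of the path construction (the class name, default property value, and printed output below are illustrative assumptions, not part of this commit):

import org.apache.hadoop.fs.Path;

// Illustrative only: contrasts the old and new ways the tests built paths.
public class WindowsTestPathSketch {
  public static void main(String[] args) {
    // On a Windows build, test.build.data typically resolves to a local
    // absolute path such as C:\hdfs\build\test\data (hypothetical value).
    String buildDir = System.getProperty("test.build.data", "C:\\hdfs\\build\\test\\data");

    // Old style: depending on the platform this either fails to parse here
    // or yields a path containing a drive letter and backslashes that HDFS
    // rejects later, which is the failure this commit addresses.
    try {
      Path oldStyle = new Path(buildDir + "/test");
      System.out.println("old style path: " + oldStyle);
    } catch (IllegalArgumentException e) {
      System.out.println("old style path could not be parsed: " + e);
    }

    // New style used by this commit: a fixed absolute path that is a valid
    // HDFS path regardless of the local operating system.
    Path newStyle = new Path("/tmp/TestFileLengthOnClusterRestart", "test");
    System.out.println("new style path: " + newStyle);
  }
}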
@@ -45,19 +45,39 @@ public class TestListFiles {
   final protected static Configuration conf = new Configuration();
   protected static FileSystem fs;
-  final protected static Path TEST_DIR = getTestDir();
+  protected static Path TEST_DIR;
   final private static int FILE_LEN = 10;
-  final private static Path FILE1 = new Path(TEST_DIR, "file1");
-  final private static Path DIR1 = new Path(TEST_DIR, "dir1");
-  final private static Path FILE2 = new Path(DIR1, "file2");
-  final private static Path FILE3 = new Path(DIR1, "file3");
+  private static Path FILE1;
+  private static Path DIR1;
+  private static Path FILE2;
+  private static Path FILE3;
+
+  static {
+    setTestPaths(new Path(
+      System.getProperty("test.build.data", "build/test/data/work-dir/localfs"),
+      "main_"));
+  }
 
   protected static Path getTestDir() {
-    return new Path(
-      System.getProperty("test.build.data","build/test/data/work-dir/localfs"),
-      "main_");
+    return TEST_DIR;
   }
+
+  /**
+   * Sets the root testing directory and reinitializes any additional test paths
+   * that are under the root.  This method is intended to be called from a
+   * subclass's @BeforeClass method if there is a need to override the testing
+   * directory.
+   *
+   * @param testDir Path root testing directory
+   */
+  protected static void setTestPaths(Path testDir) {
+    TEST_DIR = testDir;
+    FILE1 = new Path(TEST_DIR, "file1");
+    DIR1 = new Path(TEST_DIR, "dir1");
+    FILE2 = new Path(DIR1, "file2");
+    FILE3 = new Path(DIR1, "file3");
+  }
 
   @BeforeClass
   public static void testSetUp() throws Exception {
     fs = FileSystem.getLocal(conf);
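The new setTestPaths hook lets a subclass redirect all of the shared test paths before the fixture runs; the TestListFilesInDFS hunk further down shows the real call site. A minimal sketch of the pattern (the subclass name and directory are hypothetical, and the local-filesystem setup only mirrors the base class fixture):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.BeforeClass;

// Illustrative subclass: re-points the inherited test paths before any of
// the base class's tests touch them.
public class TestListFilesOnAltDir extends TestListFiles {
  @BeforeClass
  public static void testSetUp() throws Exception {
    // Reinitializes TEST_DIR, FILE1, DIR1, FILE2 and FILE3 under the new
    // root, as TestListFilesInDFS does with "/tmp/TestListFilesInDFS".
    setTestPaths(new Path("/tmp/TestListFilesOnAltDir"));
    // Set up the file system under test, mirroring the base class fixture.
    fs = FileSystem.getLocal(conf);
    fs.delete(TEST_DIR, true);
  }
}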
@@ -310,6 +310,9 @@ Release 2.0.4-beta - UNRELEASED
   OPTIMIZATIONS
 
   BUG FIXES
 
+    HDFS-4470. Several HDFS tests attempt file operations on invalid HDFS
+    paths when running on Windows. (Chris Nauroth via suresh)
+
 Release 2.0.3-alpha - 2013-02-06
 
@@ -43,7 +43,7 @@ public class TestFileLengthOnClusterRestart {
         .numDataNodes(2).build();
     HdfsDataInputStream in = null;
     try {
-      Path path = new Path(MiniDFSCluster.getBaseDirectory(), "test");
+      Path path = new Path("/tmp/TestFileLengthOnClusterRestart", "test");
       DistributedFileSystem dfs = (DistributedFileSystem) cluster
           .getFileSystem();
       FSDataOutputStream out = dfs.create(path);
@@ -183,8 +183,7 @@ public class TestLargeBlock {
     try {
 
       // create a new file in test data directory
-      Path file1 = new Path(System.getProperty("test.build.data") + "/" +
-          Long.toString(blockSize) + ".dat");
+      Path file1 = new Path("/tmp/TestLargeBlock", blockSize + ".dat");
       FSDataOutputStream stm = createFile(fs, file1, 1, blockSize);
       LOG.info("File " + file1 + " created with file size " +
           fileSize +
@@ -38,6 +38,7 @@ public class TestListFilesInDFS extends TestListFiles {
 
   @BeforeClass
   public static void testSetUp() throws Exception {
+    setTestPaths(new Path("/tmp/TestListFilesInDFS"));
     cluster = new MiniDFSCluster.Builder(conf).build();
     fs = cluster.getFileSystem();
     fs.delete(TEST_DIR, true);
@@ -67,7 +67,7 @@ public class TestRBWBlockInvalidation {
     try {
       final FSNamesystem namesystem = cluster.getNamesystem();
       FileSystem fs = cluster.getFileSystem();
-      Path testPath = new Path(MiniDFSCluster.getBaseDirectory(), "foo1");
+      Path testPath = new Path("/tmp/TestRBWBlockInvalidation", "foo1");
       out = fs.create(testPath, (short) 2);
       out.writeBytes("HDFS-3157: " + testPath);
       out.hsync();