Commit the test and the conf changes in common for HDFS-2284.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1166009 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Tsz-wo Sze 2011-09-07 06:05:04 +00:00
parent 0ecba04727
commit 1d6793d0b7
3 changed files with 13 additions and 8 deletions

View File

@@ -317,6 +317,11 @@
<value>org.apache.hadoop.hdfs.HsftpFileSystem</value>
</property>
<property>
<name>fs.webhdfs.impl</name>
<value>org.apache.hadoop.hdfs.web.WebHdfsFileSystem</value>
</property>
<property>
<name>fs.ftp.impl</name>
<value>org.apache.hadoop.fs.ftp.FTPFileSystem</value>

View File

@@ -32,6 +32,7 @@ import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mortbay.log.Log;
import static org.apache.hadoop.fs.FileSystemTestHelper.*;
@@ -62,8 +63,6 @@ public abstract class FSMainOperationsBaseTest {
private static String TEST_DIR_AXX = "test/hadoop/axx";
private static int numBlocks = 2;
static final String LOCAL_FS_ROOT_URI = "file:///tmp/test";
protected static FileSystem fSys;
@@ -83,7 +82,7 @@ public abstract class FSMainOperationsBaseTest {
}
};
private static byte[] data = getFileData(numBlocks,
protected static final byte[] data = getFileData(numBlocks,
getDefaultBlockSize());
@Before
@@ -183,7 +182,7 @@ public abstract class FSMainOperationsBaseTest {
@Test
public void testWDAbsolute() throws IOException {
Path absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir");
Path absoluteDir = new Path(fSys.getUri() + "/test/existingDir");
fSys.mkdirs(absoluteDir);
fSys.setWorkingDirectory(absoluteDir);
Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
@@ -646,7 +645,7 @@ public abstract class FSMainOperationsBaseTest {
writeReadAndDelete(getDefaultBlockSize() * 2);
}
private void writeReadAndDelete(int len) throws IOException {
protected void writeReadAndDelete(int len) throws IOException {
Path path = getTestRootPath(fSys, "test/hadoop/file");
fSys.mkdirs(path.getParent());
@@ -768,6 +767,7 @@ public abstract class FSMainOperationsBaseTest {
rename(src, dst, false, false, false, Rename.NONE);
Assert.fail("Should throw FileNotFoundException");
} catch (IOException e) {
Log.info("XXX", e);
Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException);
}

View File

@@ -45,7 +45,7 @@ import org.apache.hadoop.fs.Path;
public abstract class FileSystemContractBaseTest extends TestCase {
protected FileSystem fs;
private byte[] data = new byte[getBlockSize() * 2]; // two blocks of data
protected byte[] data = new byte[getBlockSize() * 2]; // two blocks of data
{
for (int i = 0; i < data.length; i++) {
data[i] = (byte) (i % 10);
@@ -215,7 +215,7 @@ public abstract class FileSystemContractBaseTest extends TestCase {
writeReadAndDelete(getBlockSize() * 2);
}
private void writeReadAndDelete(int len) throws IOException {
protected void writeReadAndDelete(int len) throws IOException {
Path path = path("/test/hadoop/file");
fs.mkdirs(path.getParent());
@@ -256,7 +256,7 @@ public abstract class FileSystemContractBaseTest extends TestCase {
assertEquals("Length", data.length, fs.getFileStatus(path).getLen());
try {
fs.create(path, false);
fs.create(path, false).close();
fail("Should throw IOException.");
} catch (IOException e) {
// Expected