HDFS-10256. Use GenericTestUtils.getTestDir method in tests for temporary directories (Contributed by Vinayakumar B)

(cherry picked from commit cc6c265171)
Vinayakumar B 2016-06-16 16:47:06 +05:30
parent 8280e3bcc2
commit 6ab1f545fb
40 changed files with 186 additions and 216 deletions
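Across all 40 files the change follows one pattern: hand-rolled lookups of the test.build.data / test.build.dir system properties are replaced by the GenericTestUtils helpers. A minimal before/after sketch of that pattern (TestExample is a hypothetical class name, not one of the files touched here):

    // Before: each test resolved its own temporary directory.
    File dir = new File(System.getProperty("test.build.data", "/tmp"), "TestExample");

    // After: GenericTestUtils owns the property lookup and its default.
    File dir = GenericTestUtils.getTestDir("TestExample");      // File under the shared test data dir
    String path = GenericTestUtils.getTempPath("TestExample");  // the same location as a String path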

View File

@@ -99,6 +99,20 @@ public class FileUtil {
     return stat2Paths(stats);
   }
 
+  /**
+   * Register all files recursively to be deleted on exit.
+   * @param file File/directory to be deleted
+   */
+  public static void fullyDeleteOnExit(final File file) {
+    file.deleteOnExit();
+    if (file.isDirectory()) {
+      File[] files = file.listFiles();
+      for (File child : files) {
+        fullyDeleteOnExit(child);
+      }
+    }
+  }
+
   /**
    * Delete a directory and all its contents. If
    * we return false, the directory may be partially-deleted.
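A note on why the new helper recurses: File.deleteOnExit() can only remove a directory at JVM exit if it is empty by then, and registrations are processed in reverse order, so registering the parent first and then each child ensures children are deleted before their parent. A hypothetical call site:

    File scratch = GenericTestUtils.getTestDir("scratch");
    FileUtil.fullyDeleteOnExit(scratch);  // scratch and everything under it is removed at JVM exit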

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.fs;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -28,8 +29,8 @@ import java.io.IOException;
 import static org.junit.Assert.*;
 
 public class TestGetSpaceUsed {
-  final static private File DIR = new File(
-      System.getProperty("test.build.data", "/tmp"), "TestGetSpaceUsed");
+  final static private File DIR =
+      GenericTestUtils.getTestDir("TestGetSpaceUsed");
 
   @Before
   public void setUp() {

View File

@@ -22,6 +22,7 @@ import org.apache.hadoop.hdfs.ExtendedBlockId;
 import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.ShmId;
 import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.Slot;
 import org.apache.hadoop.io.nativeio.SharedFileDescriptorFactory;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Assert;
 import org.junit.Assume;
 import org.junit.Before;
@@ -38,8 +39,7 @@ public class TestShortCircuitShm {
   public static final Logger LOG = LoggerFactory.getLogger(
       TestShortCircuitShm.class);
 
-  private static final File TEST_BASE =
-      new File(System.getProperty("test.build.data", "/tmp"));
+  private static final File TEST_BASE = GenericTestUtils.getTestDir();
 
   @Before
   public void before() {

View File

@@ -23,6 +23,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.TestJettyHelper;
 import org.junit.AfterClass;
 import org.junit.runner.RunWith;
@@ -37,8 +38,8 @@ import java.util.UUID;
 public class TestHttpFSFWithSWebhdfsFileSystem
     extends TestHttpFSWithHttpFSFileSystem {
   private static String classpathDir;
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + UUID.randomUUID();
+  private static final String BASEDIR =
+      GenericTestUtils.getTempPath(UUID.randomUUID().toString());
 
   private static Configuration sslConf;

View File

@@ -25,6 +25,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.TestDirHelper;
 import org.junit.Assert;
 import org.junit.runner.RunWith;
@@ -41,9 +42,7 @@ public class TestHttpFSFileSystemLocalFileSystem extends BaseTestHttpFSWith {
 
   static {
     new TestDirHelper();
-    String prefix =
-        System.getProperty("test.build.dir", "target/test-dir") + "/local";
-    File file = new File(prefix);
+    File file = GenericTestUtils.getTestDir("local");
     file.mkdirs();
     PATH_PREFIX = file.getAbsolutePath();
   }

View File

@@ -30,13 +30,14 @@ import org.apache.hadoop.hdfs.nfs.conf.NfsConfigKeys;
 import org.apache.hadoop.hdfs.nfs.conf.NfsConfiguration;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestNfs3HttpServer {
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + TestNfs3HttpServer.class.getSimpleName();
+  private static final String BASEDIR =
+      GenericTestUtils.getTempPath(TestNfs3HttpServer.class.getSimpleName());
   private static NfsConfiguration conf = new NfsConfiguration();
   private static MiniDFSCluster cluster;
   private static String keystoresDir;

View File

@@ -44,6 +44,7 @@ import org.apache.hadoop.hdfs.HDFSPolicyProvider;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.tools.CryptoAdmin;
 import org.apache.hadoop.security.authorize.PolicyProvider;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -63,8 +64,7 @@ public class TestCryptoAdminCLI extends CLITestHelperDFS {
         HDFSPolicyProvider.class, PolicyProvider.class);
     conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1);
 
-    tmpDir = new File(System.getProperty("test.build.data", "target"),
-        UUID.randomUUID().toString()).getAbsoluteFile();
+    tmpDir = GenericTestUtils.getTestDir(UUID.randomUUID().toString());
     final Path jksPath = new Path(tmpDir.toString(), "test.jks");
     conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI,
         JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri());

View File

@@ -557,27 +557,25 @@ public class TestEnhancedByteBufferAccess {
    */
   @Test
   public void testIndirectFallbackReads() throws Exception {
-    final File TEST_DIR = new File(
-        System.getProperty("test.build.data","build/test/data"));
-    final String TEST_PATH = TEST_DIR + File.separator +
-        "indirectFallbackTestFile";
+    final String testPath = GenericTestUtils
+        .getTestDir("indirectFallbackTestFile").getAbsolutePath();
     final int TEST_FILE_LENGTH = 16385;
     final int RANDOM_SEED = 23453;
     FileOutputStream fos = null;
     FileInputStream fis = null;
     try {
-      fos = new FileOutputStream(TEST_PATH);
+      fos = new FileOutputStream(testPath);
       Random random = new Random(RANDOM_SEED);
      byte original[] = new byte[TEST_FILE_LENGTH];
       random.nextBytes(original);
       fos.write(original);
       fos.close();
       fos = null;
-      fis = new FileInputStream(TEST_PATH);
+      fis = new FileInputStream(testPath);
       testFallbackImpl(fis, original);
     } finally {
       IOUtils.cleanup(LOG, fos, fis);
-      new File(TEST_PATH).delete();
+      new File(testPath).delete();
     }
   }

View File

@@ -24,6 +24,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.ssl.SSLFactory;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -50,9 +51,8 @@ public class TestSWebHdfsFileContextMainOperations
 
   private static final HdfsConfiguration CONF = new HdfsConfiguration();
 
-  private static final String BASEDIR =
-      System.getProperty("test.build.dir", "target/test-dir") + "/"
-          + TestSWebHdfsFileContextMainOperations.class.getSimpleName();
+  private static final String BASEDIR = GenericTestUtils
+      .getTempPath(TestSWebHdfsFileContextMainOperations.class.getSimpleName());
 
   protected static int numBlocks = 2;
   protected static final byte[] data = getFileData(numBlocks,
       getDefaultBlockSize());

View File

@@ -23,6 +23,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -63,7 +64,7 @@ public class TestWebHdfsFileContextMainOperations
 
   @Override
   protected FileContextTestHelper createFileContextHelper() {
-    return new FileContextTestHelper("/tmp/TestWebHdfsFileContextMainOperations");
+    return new FileContextTestHelper();
   }
 
   public URI getWebhdfsUrl() {
@@ -88,8 +89,8 @@ public class TestWebHdfsFileContextMainOperations
   public void setUp() throws Exception {
     URI webhdfsUrlReal = getWebhdfsUrl();
     Path testBuildData = new Path(
-        webhdfsUrlReal + "/build/test/data/" + RandomStringUtils
-            .randomAlphanumeric(10));
+        webhdfsUrlReal + "/" + GenericTestUtils.DEFAULT_TEST_DATA_PATH
+            + RandomStringUtils.randomAlphanumeric(10));
     Path rootPath = new Path(testBuildData, "root-uri");
     localFsRootPath = rootPath.makeQualified(webhdfsUrlReal, null);

View File

@@ -102,7 +102,6 @@ import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResour
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
-import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetUtil;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsVolumeImpl;
 import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
@@ -142,7 +141,8 @@ public class MiniDFSCluster {
   private static final String NAMESERVICE_ID_PREFIX = "nameserviceId";
   private static final Log LOG = LogFactory.getLog(MiniDFSCluster.class);
   /** System property to set the data dir: {@value} */
-  public static final String PROP_TEST_BUILD_DATA = "test.build.data";
+  public static final String PROP_TEST_BUILD_DATA =
+      GenericTestUtils.SYSPROP_TEST_DATA_DIR;
   /** Configuration option to set the data dir: {@value} */
   public static final String HDFS_MINIDFS_BASEDIR = "hdfs.minidfs.basedir";
   public static final String DFS_NAMENODE_SAFEMODE_EXTENSION_TESTING_KEY
@@ -1828,12 +1828,11 @@ public class MiniDFSCluster {
     ShutdownHookManager.get().clearShutdownHooks();
 
     if (base_dir != null) {
       if (deleteDfsDir) {
-        base_dir.delete();
+        FileUtil.fullyDelete(base_dir);
       } else {
-        base_dir.deleteOnExit();
+        FileUtil.fullyDeleteOnExit(base_dir);
       }
     }
   }
 
   /**
@@ -2650,13 +2649,13 @@ public class MiniDFSCluster {
   /**
    * Get the base directory for any DFS cluster whose configuration does
-   * not explicitly set it. This is done by retrieving the system property
-   * {@link #PROP_TEST_BUILD_DATA} (defaulting to "build/test/data" ),
-   * and returning that directory with a subdir of /dfs.
+   * not explicitly set it. This is done via
+   * {@link GenericTestUtils#getTestDir()}.
    * @return a directory for use as a miniDFS filesystem.
    */
   public static String getBaseDirectory() {
-    return System.getProperty(PROP_TEST_BUILD_DATA, "build/test/data") + "/dfs/";
+    return GenericTestUtils.getTestDir("dfs").getAbsolutePath()
+        + File.separator;
   }
 
   /**

View File

@@ -115,9 +115,7 @@ public class TestAppendSnapshotTruncate {
     dfs.mkdirs(dir);
     dfs.allowSnapshot(dir);
 
-    final File localDir = new File(
-        System.getProperty("test.build.data", "target/test/data")
-        + dirPathString);
+    final File localDir = GenericTestUtils.getTestDir(dirPathString);
     if (localDir.exists()) {
       FileUtil.fullyDelete(localDir);
     }

View File

@@ -3152,8 +3152,8 @@ public class TestDFSShell {
       assertTrue(e.getMessage().contains("Invalid path name /.reserved"));
     }
 
-    final String testdir = System.getProperty("test.build.data")
-        + "/TestDFSShell-testCopyReserved";
+    final String testdir = GenericTestUtils.getTempPath(
+        "TestDFSShell-testCopyReserved");
     final Path hdfsTestDir = new Path(testdir);
     writeFile(fs, new Path(testdir, "testFileForPut"));
     final Path src = new Path(hdfsTestDir, "srcfile");

View File

@@ -89,8 +89,10 @@ public class TestDFSUpgradeFromImage {
   static {
     upgradeConf = new HdfsConfiguration();
     upgradeConf.setInt(DFSConfigKeys.DFS_DATANODE_SCAN_PERIOD_HOURS_KEY, -1); // block scanning off
-    if (System.getProperty("test.build.data") == null) { // to allow test to be run outside of Maven
-      System.setProperty("test.build.data", "build/test/data");
+    if (System.getProperty(GenericTestUtils.SYSPROP_TEST_DATA_DIR) == null) {
+      // to allow test to be run outside of Maven
+      System.setProperty(GenericTestUtils.SYSPROP_TEST_DATA_DIR,
+          GenericTestUtils.DEFAULT_TEST_DATA_DIR);
     }
   }
 
@@ -105,19 +107,19 @@ public class TestDFSUpgradeFromImage {
 
   void unpackStorage(String tarFileName, String referenceName)
       throws IOException {
-    String tarFile = System.getProperty("test.cache.data", "build/test/cache")
+    String tarFile = System.getProperty("test.cache.data", "target/test/cache")
         + "/" + tarFileName;
-    String dataDir = System.getProperty("test.build.data", "build/test/data");
+    File dataDir = GenericTestUtils.getTestDir();
     File dfsDir = new File(dataDir, "dfs");
     if ( dfsDir.exists() && !FileUtil.fullyDelete(dfsDir) ) {
       throw new IOException("Could not delete dfs directory '" + dfsDir + "'");
     }
     LOG.info("Unpacking " + tarFile);
-    FileUtil.unTar(new File(tarFile), new File(dataDir));
+    FileUtil.unTar(new File(tarFile), dataDir);
     //Now read the reference info
 
     BufferedReader reader = new BufferedReader(new FileReader(
-        System.getProperty("test.cache.data", "build/test/cache")
+        System.getProperty("test.cache.data", "target/test/cache")
         + "/" + referenceName));
     String line;
     while ( (line = reader.readLine()) != null ) {
@@ -631,10 +633,10 @@ public class TestDFSUpgradeFromImage {
     unpackStorage(HADOOP1_BBW_IMAGE, HADOOP_DFS_DIR_TXT);
     Configuration conf = new Configuration(upgradeConf);
     conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY,
-        System.getProperty("test.build.data") + File.separator +
-        "dfs" + File.separator +
-        "data" + File.separator +
-        "data1");
+        GenericTestUtils.getTempPath(
+            "dfs" + File.separator +
+            "data" + File.separator +
+            "data1"));
     upgradeAndVerify(new MiniDFSCluster.Builder(conf).
           numDataNodes(1).enableManagedDfsDirsRedundancy(false).
           manageDataDfsDirs(false), null);

View File

@@ -874,8 +874,7 @@ public class TestDFSUtil {
 
   @Test
   public void testGetPassword() throws Exception {
-    File testDir = new File(System.getProperty("test.build.data",
-        "target/test-dir"));
+    File testDir = GenericTestUtils.getTestDir();
 
     Configuration conf = new Configuration();
     final Path jksPath = new Path(testDir.toString(), "test.jks");

View File

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hdfs;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
 import java.io.File;
@@ -42,11 +43,11 @@ public class TestDatanodeLayoutUpgrade {
     TestDFSUpgradeFromImage upgrade = new TestDFSUpgradeFromImage();
     upgrade.unpackStorage(HADOOP24_DATANODE, HADOOP_DATANODE_DIR_TXT);
     Configuration conf = new Configuration(TestDFSUpgradeFromImage.upgradeConf);
-    conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY,
-        new File(System.getProperty("test.build.data"),
-        "dfs" + File.separator + "data").toURI().toString());
-    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
-        new File(System.getProperty("test.build.data"),
-        "dfs" + File.separator + "name").toURI().toString());
+    conf.set(
+        DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, GenericTestUtils.getTestDir(
+            "dfs" + File.separator + "data").toURI().toString());
+    conf.set(
+        DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, GenericTestUtils.getTestDir(
+            "dfs" + File.separator + "name").toURI().toString());
     upgrade.upgradeAndVerify(new MiniDFSCluster.Builder(conf).numDataNodes(1)
         .manageDataDfsDirs(false).manageNameDfsDirs(false), null);
@@ -61,12 +62,12 @@ public class TestDatanodeLayoutUpgrade {
     TestDFSUpgradeFromImage upgrade = new TestDFSUpgradeFromImage();
     upgrade.unpackStorage(HADOOP_56_DN_LAYOUT, HADOOP_56_DN_LAYOUT_TXT);
     Configuration conf = new Configuration(TestDFSUpgradeFromImage.upgradeConf);
-    conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY,
-        new File(System.getProperty("test.build.data"), "dfs" + File.separator
-            + "data").toURI().toString());
-    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
-        new File(System.getProperty("test.build.data"), "dfs" + File.separator
-            + "name").toURI().toString());
+    conf.set(
+        DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, GenericTestUtils.getTestDir(
+            "dfs" + File.separator + "data").toURI().toString());
+    conf.set(
+        DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, GenericTestUtils.getTestDir(
+            "dfs" + File.separator + "name").toURI().toString());
     upgrade.upgradeAndVerify(new MiniDFSCluster.Builder(conf).numDataNodes(1)
         .manageDataDfsDirs(false).manageNameDfsDirs(false), null);
   }

View File

@@ -87,12 +87,12 @@ public class TestDatanodeStartupFixesLegacyStorageIDs {
 
   private static void initStorageDirs(final Configuration conf,
                                       final String testName) {
-    conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY,
-        System.getProperty("test.build.data") + File.separator +
-            testName + File.separator + "dfs" + File.separator + "data");
-    conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY,
-        System.getProperty("test.build.data") + File.separator +
-            testName + File.separator + "dfs" + File.separator + "name");
+    conf.set(
+        DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, GenericTestUtils.getTempPath(
+            testName + File.separator + "dfs" + File.separator + "data"));
+    conf.set(
+        DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, GenericTestUtils.getTempPath(
+            testName + File.separator + "dfs" + File.separator + "name"));
   }

View File

@@ -32,13 +32,14 @@ import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.io.MD5Hash;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.Test;
 
 public class TestFetchImage {
 
-  private static final File FETCHED_IMAGE_FILE = new File(
-      System.getProperty("test.build.dir"), "target/fetched-image-dir");
+  private static final File FETCHED_IMAGE_FILE =
+      GenericTestUtils.getTestDir("target/fetched-image-dir");
 
   // Shamelessly stolen from NNStorage.
   private static final Pattern IMAGE_REGEX = Pattern.compile("fsimage_(\\d+)");

View File

@@ -485,7 +485,7 @@ public class TestPread {
     Configuration conf = new HdfsConfiguration();
     FileSystem fileSys = FileSystem.getLocal(conf);
     try {
-      Path file1 = new Path("build/test/data", "preadtest.dat");
+      Path file1 = new Path(GenericTestUtils.getTempPath("preadtest.dat"));
       writeFile(fileSys, file1);
       pReadFile(fileSys, file1);
       cleanupFile(fileSys, file1);

View File

@@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
 /**
@@ -208,7 +209,7 @@ public class TestSeekBug {
     Configuration conf = new HdfsConfiguration();
     FileSystem fileSys = FileSystem.getLocal(conf);
     try {
-      Path file1 = new Path("build/test/data", "seektest.dat");
+      Path file1 = new Path(GenericTestUtils.getTempPath("seektest.dat"));
       writeFile(fileSys, file1);
       seekReadFile(fileSys, file1);
       cleanupFile(fileSys, file1);

View File

@@ -43,6 +43,7 @@ import org.apache.hadoop.minikdc.MiniKdc;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
@@ -74,8 +75,8 @@ public abstract class SaslDataTransferTestCase {
 
   @BeforeClass
   public static void initKdc() throws Exception {
-    baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
-        SaslDataTransferTestCase.class.getSimpleName());
+    baseDir = GenericTestUtils
+        .getTestDir(SaslDataTransferTestCase.class.getSimpleName());
     FileUtil.fullyDelete(baseDir);
     assertTrue(baseDir.mkdirs());

View File

@@ -55,6 +55,7 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.AfterClass;
 import org.junit.Before;
@@ -82,8 +83,8 @@ public class TestSecureNNWithQJM {
 
   @BeforeClass
   public static void init() throws Exception {
-    baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
-        TestSecureNNWithQJM.class.getSimpleName());
+    baseDir =
+        GenericTestUtils.getTestDir(TestSecureNNWithQJM.class.getSimpleName());
     FileUtil.fullyDelete(baseDir);
     assertTrue(baseDir.mkdirs());

View File

@@ -25,6 +25,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;
 
 import java.io.File;
@@ -69,7 +70,7 @@ public class TestDataNodeUUID {
 
   @Test(timeout = 10000)
   public void testUUIDRegeneration() throws Exception {
-    File baseDir = new File(System.getProperty("test.build.data"));
+    File baseDir = GenericTestUtils.getTestDir();
     File disk1 = new File(baseDir, "disk1");
     File disk2 = new File(baseDir, "disk2");

View File

@@ -47,8 +47,7 @@ public class TestDataStorage {
   private final static String BUILD_VERSION = "2.0";
   private final static String SOFTWARE_VERSION = "2.0";
   private final static long CTIME = 1;
-  private final static File TEST_DIR =
-      new File(System.getProperty("test.build.data") + "/dstest");
+  private final static File TEST_DIR = GenericTestUtils.getTestDir("dstest");
   private final static StartupOption START_OPT = StartupOption.REGULAR;
 
   private DataNode mockDN = Mockito.mock(DataNode.class);

View File

@@ -22,6 +22,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY;
 
 import java.io.File;
 
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
@@ -42,9 +43,8 @@ public class TestCreateEditsLog {
 
   private static final File HDFS_DIR = new File(
     MiniDFSCluster.getBaseDirectory()).getAbsoluteFile();
-  private static final File TEST_DIR = new File(
-      System.getProperty("test.build.data", "build/test/data"),
-      "TestCreateEditsLog").getAbsoluteFile();
+  private static final File TEST_DIR =
+      GenericTestUtils.getTestDir("TestCreateEditsLog");
 
   private MiniDFSCluster cluster;

View File

@@ -25,7 +25,6 @@ import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Random;
@@ -55,6 +54,7 @@ import org.apache.hadoop.hdfs.server.blockmanagement.DecommissionManager;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
+import org.apache.hadoop.hdfs.util.HostsFileWriter;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
 import org.junit.After;
@@ -71,10 +71,8 @@ public class TestDecommissioningStatus {
   private static final int numDatanodes = 2;
   private static MiniDFSCluster cluster;
   private static FileSystem fileSys;
-  private static Path excludeFile;
-  private static FileSystem localFileSys;
+  private static HostsFileWriter hostsFileWriter;
   private static Configuration conf;
-  private static Path dir;
 
   final ArrayList<String> decommissionedNodes = new ArrayList<String>(numDatanodes);
@@ -85,14 +83,8 @@
         false);
 
     // Set up the hosts/exclude files.
-    localFileSys = FileSystem.getLocal(conf);
-    Path workingDir = localFileSys.getWorkingDirectory();
-    dir = new Path(workingDir, "build/test/data/work-dir/decommission");
-    assertTrue(localFileSys.mkdirs(dir));
-    excludeFile = new Path(dir, "exclude");
-    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
-    Path includeFile = new Path(dir, "include");
-    conf.set(DFSConfigKeys.DFS_HOSTS, includeFile.toUri().getPath());
+    hostsFileWriter = new HostsFileWriter();
+    hostsFileWriter.initialize(conf, "work-dir/decommission");
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY,
         1000);
     conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
@@ -102,9 +94,6 @@
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY, 1);
     conf.setLong(DFSConfigKeys.DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY, 1);
 
-    writeConfigFile(localFileSys, excludeFile, null);
-    writeConfigFile(localFileSys, includeFile, null);
-
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDatanodes).build();
     cluster.waitActive();
     fileSys = cluster.getFileSystem();
@@ -115,31 +104,13 @@
 
   @After
   public void tearDown() throws Exception {
-    if (localFileSys != null ) cleanupFile(localFileSys, dir);
+    if (hostsFileWriter != null) {
+      hostsFileWriter.cleanup();
+    }
     if(fileSys != null) fileSys.close();
     if(cluster != null) cluster.shutdown();
   }
 
-  private static void writeConfigFile(FileSystem fs, Path name,
-      ArrayList<String> nodes) throws IOException {
-    // delete if it already exists
-    if (fs.exists(name)) {
-      fs.delete(name, true);
-    }
-
-    FSDataOutputStream stm = fs.create(name);
-
-    if (nodes != null) {
-      for (Iterator<String> it = nodes.iterator(); it.hasNext();) {
-        String node = it.next();
-        stm.writeBytes(node);
-        stm.writeBytes("\n");
-      }
-    }
-
-    stm.close();
-  }
-
   private void writeFile(FileSystem fileSys, Path name, short repl)
       throws IOException {
     // create and write a file that contains three blocks of data
@@ -182,25 +153,25 @@
    * Decommissions the node at the given index
    */
   private String decommissionNode(FSNamesystem namesystem, DFSClient client,
-      FileSystem localFileSys, int nodeIndex) throws IOException {
+      int nodeIndex) throws IOException {
     DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
 
     String nodename = info[nodeIndex].getXferAddr();
-    decommissionNode(namesystem, localFileSys, nodename);
+    decommissionNode(namesystem, nodename);
     return nodename;
   }
 
   /*
    * Decommissions the node by name
   */
-  private void decommissionNode(FSNamesystem namesystem,
-      FileSystem localFileSys, String dnName) throws IOException {
+  private void decommissionNode(FSNamesystem namesystem, String dnName)
+      throws IOException {
     System.out.println("Decommissioning node: " + dnName);
 
     // write nodename into the exclude file.
     ArrayList<String> nodes = new ArrayList<String>(decommissionedNodes);
     nodes.add(dnName);
-    writeConfigFile(localFileSys, excludeFile, nodes);
+    hostsFileWriter.initExcludeHosts(nodes.toArray(new String[0]));
  }
 
   private void checkDecommissionStatus(DatanodeDescriptor decommNode,
@@ -292,7 +263,7 @@
     FSNamesystem fsn = cluster.getNamesystem();
     final DatanodeManager dm = fsn.getBlockManager().getDatanodeManager();
     for (int iteration = 0; iteration < numDatanodes; iteration++) {
-      String downnode = decommissionNode(fsn, client, localFileSys, iteration);
+      String downnode = decommissionNode(fsn, client, iteration);
       dm.refreshNodes(conf);
       decommissionedNodes.add(downnode);
       BlockManagerTestUtil.recheckDecommissionState(dm);
@@ -319,7 +290,7 @@
     // Call refreshNodes on FSNamesystem with empty exclude file.
     // This will remove the datanodes from decommissioning list and
     // make them available again.
-    writeConfigFile(localFileSys, excludeFile, null);
+    hostsFileWriter.initExcludeHost("");
     dm.refreshNodes(conf);
     st1.close();
     cleanupFile(fileSys, file1);
@@ -349,7 +320,7 @@
     // Decommission the DN.
     FSNamesystem fsn = cluster.getNamesystem();
     final DatanodeManager dm = fsn.getBlockManager().getDatanodeManager();
-    decommissionNode(fsn, localFileSys, dnName);
+    decommissionNode(fsn, dnName);
     dm.refreshNodes(conf);
 
     // Stop the DN when decommission is in progress.
@@ -396,7 +367,7 @@
     // Call refreshNodes on FSNamesystem with empty exclude file.
     // This will remove the datanodes from decommissioning list and
     // make them available again.
-    writeConfigFile(localFileSys, excludeFile, null);
+    hostsFileWriter.initExcludeHost("");
     dm.refreshNodes(conf);
   }
 
@@ -417,7 +388,7 @@
     FSNamesystem fsn = cluster.getNamesystem();
     final DatanodeManager dm = fsn.getBlockManager().getDatanodeManager();
     DatanodeDescriptor dnDescriptor = dm.getDatanode(dnID);
-    decommissionNode(fsn, localFileSys, dnName);
+    decommissionNode(fsn, dnName);
     dm.refreshNodes(conf);
     BlockManagerTestUtil.recheckDecommissionState(dm);
     assertTrue(dnDescriptor.isDecommissioned());
@@ -428,7 +399,7 @@
 
     // Call refreshNodes on FSNamesystem with empty exclude file to remove the
     // datanode from decommissioning list and make it available again.
-    writeConfigFile(localFileSys, excludeFile, null);
+    hostsFileWriter.initExcludeHost("");
     dm.refreshNodes(conf);
   }
 }

View File

@@ -86,8 +86,7 @@ public class TestEditLogFileInputStream {
   @Test(timeout=60000)
   public void testScanCorruptEditLog() throws Exception {
     Configuration conf = new Configuration();
-    File editLog = new File(System.getProperty(
-        "test.build.data", "/tmp"), "testCorruptEditLog");
+    File editLog = new File(GenericTestUtils.getTempPath("testCorruptEditLog"));
     LOG.debug("Creating test edit log file: " + editLog);
     EditLogFileOutputStream elos = new EditLogFileOutputStream(conf,

View File

@@ -69,8 +69,8 @@ public class TestFSImageWithSnapshot {
 
   private final Path dir = new Path("/TestSnapshot");
   private static final String testDir =
-      System.getProperty("test.build.data", "build/test/data");
+      GenericTestUtils.getTestDir().getAbsolutePath();
 
   Configuration conf;
   MiniDFSCluster cluster;
   FSNamesystem fsn;

View File

@@ -116,8 +116,8 @@ import com.google.common.collect.Sets;
  * A JUnit test for doing fsck
  */
 public class TestFsck {
-  static final String auditLogFile = System.getProperty("test.build.dir",
-      "build/test") + "/TestFsck-audit.log";
+  static final String AUDITLOG_FILE =
+      GenericTestUtils.getTempPath("TestFsck-audit.log");
 
   // Pattern for:
   // allowed=true ugi=name ip=/address cmd=FSCK src=/ dst=null perm=null
@@ -206,14 +206,15 @@ public class TestFsck {
 
   /** Sets up log4j logger for auditlogs */
   private void setupAuditLogs() throws IOException {
-    File file = new File(auditLogFile);
+    File file = new File(AUDITLOG_FILE);
     if (file.exists()) {
       file.delete();
     }
     Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger();
     logger.setLevel(Level.INFO);
     PatternLayout layout = new PatternLayout("%m%n");
-    RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile);
+    RollingFileAppender appender =
+        new RollingFileAppender(layout, AUDITLOG_FILE);
     logger.addAppender(appender);
   }
 
@@ -225,7 +226,7 @@ public class TestFsck {
     BufferedReader reader = null;
     try {
       // Audit log should contain one getfileinfo and one fsck
-      reader = new BufferedReader(new FileReader(auditLogFile));
+      reader = new BufferedReader(new FileReader(AUDITLOG_FILE));
       String line;
 
       // one extra getfileinfo stems from resolving the path

View File

@@ -32,6 +32,7 @@ import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.http.HttpConfig.Policy;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -42,8 +43,8 @@ import org.junit.runners.Parameterized.Parameters;
 
 @RunWith(value = Parameterized.class)
 public class TestNameNodeHttpServer {
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + TestNameNodeHttpServer.class.getSimpleName();
+  private static final String BASEDIR = GenericTestUtils
+      .getTempPath(TestNameNodeHttpServer.class.getSimpleName());
   private static String keystoresDir;
   private static String sslConfDir;
   private static Configuration conf;

View File

@@ -49,6 +49,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.DeleteOp;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.OpInstanceCache;
 import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
 import org.apache.hadoop.io.IOUtils;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.test.PathUtils;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
@@ -515,8 +516,8 @@ public class TestNameNodeRecovery {
     conf.set(DFSConfigKeys.DFS_HA_NAMENODE_ID_KEY, "nn1");
     conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX,
       "ns1"), "nn1,nn2");
-    String baseDir = System.getProperty(
-        MiniDFSCluster.PROP_TEST_BUILD_DATA, "build/test/data") + "/dfs/";
+    String baseDir = GenericTestUtils.getTestDir("setupRecoveryTestConf")
+        .getAbsolutePath();
     File nameDir = new File(baseDir, "nameR");
     File secondaryDir = new File(baseDir, "namesecondaryR");
     conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.

View File

@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 
 import java.io.File;
 import java.io.IOException;
@@ -238,8 +239,8 @@ public class TestNameNodeRespectsBindHostKeys {
     }
   }
 
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + TestNameNodeRespectsBindHostKeys.class.getSimpleName();
+  private static final String BASEDIR = GenericTestUtils
+      .getTempPath(TestNameNodeRespectsBindHostKeys.class.getSimpleName());
 
   private static void setupSsl() throws Exception {
     Configuration conf = new Configuration();

View File

@@ -95,7 +95,7 @@ public class TestRenameWithSnapshots {
   private static FSDirectory fsdir;
   private static DistributedFileSystem hdfs;
   private static final String testDir =
-      System.getProperty("test.build.data", "build/test/data");
+      GenericTestUtils.getTestDir().getAbsolutePath();
   static private final Path dir = new Path("/testRenameWithSnapshots");
   static private final Path sub1 = new Path(dir, "sub1");
   static private final Path file1 = new Path(sub1, "file1");

View File

@@ -98,8 +98,8 @@ public class TestSnapshot {
   protected DistributedFileSystem hdfs;
 
   private static final String testDir =
-      System.getProperty("test.build.data", "build/test/data");
+      GenericTestUtils.getTestDir().getAbsolutePath();
 
   @Rule
   public ExpectedException exception = ExpectedException.none();

View File

@@ -35,20 +35,18 @@ import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
-import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
 
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.tools.GetConf.Command;
 import org.apache.hadoop.hdfs.tools.GetConf.CommandHandler;
+import org.apache.hadoop.hdfs.util.HostsFileWriter;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Test;
@@ -390,42 +388,29 @@ public class TestGetConf {
   public void TestGetConfExcludeCommand() throws Exception{
     HdfsConfiguration conf = new HdfsConfiguration();
     // Set up the hosts/exclude files.
-    localFileSys = FileSystem.getLocal(conf);
-    Path workingDir = localFileSys.getWorkingDirectory();
-    Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
-    Path hostsFile = new Path(dir, "hosts");
-    Path excludeFile = new Path(dir, "exclude");
-
-    // Setup conf
-    conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
-    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
-    writeConfigFile(hostsFile, null);
-    writeConfigFile(excludeFile, null);
+    HostsFileWriter hostsFileWriter = new HostsFileWriter();
+    hostsFileWriter.initialize(conf, "GetConf");
+    Path excludeFile = hostsFileWriter.getExcludeFile();
+
     String[] args = {"-excludeFile"};
     String ret = runTool(conf, args, true);
     assertEquals(excludeFile.toUri().getPath(),ret.trim());
-    cleanupFile(localFileSys, excludeFile.getParent());
+    hostsFileWriter.cleanup();
   }
 
   @Test
   public void TestGetConfIncludeCommand() throws Exception{
     HdfsConfiguration conf = new HdfsConfiguration();
     // Set up the hosts/exclude files.
-    localFileSys = FileSystem.getLocal(conf);
-    Path workingDir = localFileSys.getWorkingDirectory();
-    Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
-    Path hostsFile = new Path(dir, "hosts");
-    Path excludeFile = new Path(dir, "exclude");
+    HostsFileWriter hostsFileWriter = new HostsFileWriter();
+    hostsFileWriter.initialize(conf, "GetConf");
+    Path hostsFile = hostsFileWriter.getIncludeFile();
 
     // Setup conf
-    conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
-    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
-    writeConfigFile(hostsFile, null);
-    writeConfigFile(excludeFile, null);
+
     String[] args = {"-includeFile"};
     String ret = runTool(conf, args, true);
     assertEquals(hostsFile.toUri().getPath(),ret.trim());
-    cleanupFile(localFileSys, excludeFile.getParent());
+    hostsFileWriter.cleanup();
   }
 
   @Test
@@ -443,29 +428,4 @@ public class TestGetConf {
     verifyAddresses(conf, TestType.NAMENODE, false, includedNN);
     verifyAddresses(conf, TestType.NNRPCADDRESSES, true, includedNN);
   }
-
-  private void writeConfigFile(Path name, ArrayList<String> nodes)
-      throws IOException {
-    // delete if it already exists
-    if (localFileSys.exists(name)) {
-      localFileSys.delete(name, true);
-    }
-
-    FSDataOutputStream stm = localFileSys.create(name);
-
-    if (nodes != null) {
-      for (Iterator<String> it = nodes.iterator(); it.hasNext();) {
-        String node = it.next();
-        stm.writeBytes(node);
-        stm.writeBytes("\n");
-      }
-    }
-
-    stm.close();
-  }
-
-  private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
-    assertTrue(fileSys.exists(name));
-    fileSys.delete(name, true);
-    assertTrue(!fileSys.exists(name));
-  }
 }

View File

@@ -73,16 +73,26 @@ public class HostsFileWriter {
   }
 
   public void initExcludeHost(String hostNameAndPort) throws IOException {
+    initExcludeHosts(hostNameAndPort);
+  }
+
+  public void initExcludeHosts(String... hostNameAndPorts) throws IOException {
+    StringBuilder excludeHosts = new StringBuilder();
     if (isLegacyHostsFile) {
-      DFSTestUtil.writeFile(localFileSys, excludeFile, hostNameAndPort);
+      for (String hostNameAndPort : hostNameAndPorts) {
+        excludeHosts.append(hostNameAndPort).append("\n");
+      }
+      DFSTestUtil.writeFile(localFileSys, excludeFile, excludeHosts.toString());
     } else {
-      DatanodeAdminProperties dn = new DatanodeAdminProperties();
-      String [] hostAndPort = hostNameAndPort.split(":");
-      dn.setHostName(hostAndPort[0]);
-      dn.setPort(Integer.parseInt(hostAndPort[1]));
-      dn.setAdminState(AdminStates.DECOMMISSIONED);
       HashSet<DatanodeAdminProperties> allDNs = new HashSet<>();
-      allDNs.add(dn);
+      for (String hostNameAndPort : hostNameAndPorts) {
+        DatanodeAdminProperties dn = new DatanodeAdminProperties();
+        String[] hostAndPort = hostNameAndPort.split(":");
+        dn.setHostName(hostAndPort[0]);
+        dn.setPort(Integer.parseInt(hostAndPort[1]));
+        dn.setAdminState(AdminStates.DECOMMISSIONED);
+        allDNs.add(dn);
+      }
       CombinedHostsFileWriter.writeFile(combinedFile.toString(), allDNs);
     }
   }
@@ -119,4 +129,12 @@ public class HostsFileWriter {
       FileUtils.deleteQuietly(new File(fullDir.toUri().getPath()));
    }
  }
+
+  public Path getIncludeFile() {
+    return includeFile;
+  }
+
+  public Path getExcludeFile() {
+    return excludeFile;
+  }
 }
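With the varargs overload and the new accessors, tests can drive the include/exclude files entirely through HostsFileWriter. A usage sketch based on the call sites updated in this commit (the host:port values are illustrative):

    HostsFileWriter writer = new HostsFileWriter();
    writer.initialize(conf, "work-dir/decommission");  // wires dfs.hosts / dfs.hosts.exclude into conf
    writer.initExcludeHosts("127.0.0.1:50010", "127.0.0.2:50010");
    Path excludeFile = writer.getExcludeFile();        // new accessor
    writer.cleanup();                                  // removes the generated files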

View File

@@ -23,6 +23,7 @@ import java.io.FileWriter;
 import java.util.Set;
 
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Before;
 import org.junit.After;
 import org.junit.Test;
@@ -35,8 +36,8 @@ import static org.junit.Assert.assertEquals;
 public class TestCombinedHostsFileReader {
 
   // Using /test/build/data/tmp directory to store temporary files
-  static final String HOSTS_TEST_DIR = new File(System.getProperty(
-      "test.build.data", "/tmp")).getAbsolutePath();
+  static final String HOSTS_TEST_DIR = GenericTestUtils.getTestDir()
+      .getAbsolutePath();
   File NEW_FILE = new File(HOSTS_TEST_DIR, "dfs.hosts.new.json");
 
   static final String TEST_CACHE_DATA_DIR =

View File

@@ -34,14 +34,15 @@ import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestHttpsFileSystem {
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + TestHttpsFileSystem.class.getSimpleName();
+  private static final String BASEDIR =
+      GenericTestUtils.getTempPath(TestHttpsFileSystem.class.getSimpleName());
 
   private static MiniDFSCluster cluster;
   private static Configuration conf;

View File

@@ -56,6 +56,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
 import org.apache.hadoop.security.token.SecretManager.InvalidToken;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.security.token.Token;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -200,8 +201,8 @@ public class TestWebHdfsTokens {
     SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf);
     clusterConf.setBoolean(DFSConfigKeys
         .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);
-    String BASEDIR = System.getProperty("test.build.dir",
-        "target/test-dir") + "/" + TestWebHdfsTokens.class.getSimpleName();
+    String baseDir =
+        GenericTestUtils.getTempPath(TestWebHdfsTokens.class.getSimpleName());
     String keystoresDir;
     String sslConfDir;
@@ -210,10 +211,10 @@ public class TestWebHdfsTokens {
     clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
     clusterConf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0");
 
-    File base = new File(BASEDIR);
+    File base = new File(baseDir);
     FileUtil.fullyDelete(base);
     base.mkdirs();
-    keystoresDir = new File(BASEDIR).getAbsolutePath();
+    keystoresDir = new File(baseDir).getAbsolutePath();
     sslConfDir = KeyStoreTestUtil.getClasspathDir(TestWebHdfsTokens.class);
     KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, clusterConf, false);
     clusterConf.set(DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY,

View File

@@ -19,7 +19,6 @@ package org.apache.hadoop.test;
 
 import java.io.File;
 
-import org.apache.commons.lang.RandomStringUtils;
 import org.apache.hadoop.fs.Path;
 
 public class PathUtils {
@@ -37,10 +36,8 @@ public class PathUtils {
   }
 
   public static File getTestDir(Class<?> caller, boolean create) {
-    File dir =
-        new File(System.getProperty("test.build.data", "target/test/data")
-            + "/" + RandomStringUtils.randomAlphanumeric(10),
-            caller.getSimpleName());
+    File dir = new File(GenericTestUtils.getRandomizedTestDir(),
+        caller.getSimpleName());
     if (create) {
      dir.mkdirs();
    }