HADOOP-9427. Use JUnit assumptions to skip platform-specific tests. Contributed by Gergely Novák.

Akira Ajisaka 2016-07-27 19:41:09 +09:00
parent 55d5993a8e
commit 54fe17a607
36 changed files with 194 additions and 196 deletions
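The change applied across these files follows a single pattern: hand-rolled "skip on Windows" early returns, which make a skipped test report as passed, are replaced by JUnit assumptions, which make the runner report the test as skipped. A minimal sketch of the before/after (the class and method names here are illustrative, not from this commit):

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.junit.Test;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;

public class SkipPatternExample {
  // Old style: skipped by hand; JUnit still counts this as a pass on Windows.
  @Test
  public void oldStyle() throws IOException {
    if (Path.WINDOWS) {
      System.out.println("Cannot run test for Windows");
      return;
    }
    // ... platform-specific assertions ...
  }

  // New style: assumeNotWindows() throws AssumptionViolatedException on
  // Windows, which JUnit 4 reports as a skipped test, not a pass or failure.
  @Test
  public void newStyle() throws IOException {
    assumeNotWindows();
    // ... platform-specific assertions ...
  }
}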

FileContextPermissionBase.java

@@ -34,6 +34,7 @@ import org.junit.Before;
 import org.junit.Test;
 
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
@@ -98,10 +99,7 @@ public abstract class FileContextPermissionBase {
   @Test
   public void testCreatePermission() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     String filename = "foo";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);
     fileContextTestHelper.createFile(fc, filename);
@@ -112,10 +110,7 @@ public abstract class FileContextPermissionBase {
   @Test
   public void testSetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     String filename = "foo";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);
@@ -137,10 +132,7 @@ public abstract class FileContextPermissionBase {
   @Test
   public void testSetOwner() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     String filename = "bar";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);

TestFileUtil.java

@@ -43,12 +43,12 @@ import java.util.zip.ZipOutputStream;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.tools.tar.TarEntry;
 import org.apache.tools.tar.TarOutputStream;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -423,10 +423,8 @@ public class TestFileUtil {
   @Test (timeout = 30000)
   public void testFailFullyDelete() throws IOException {
-    if(Shell.WINDOWS) {
-      // windows Dir.setWritable(false) does not work for directories
-      return;
-    }
+    // Windows Dir.setWritable(false) does not work for directories
+    assumeNotWindows();
     LOG.info("Running test to verify failure of fullyDelete()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDelete(new MyFile(del));
@@ -504,10 +502,8 @@ public class TestFileUtil {
   @Test (timeout = 30000)
   public void testFailFullyDeleteContents() throws IOException {
-    if(Shell.WINDOWS) {
-      // windows Dir.setWritable(false) does not work for directories
-      return;
-    }
+    // Windows Dir.setWritable(false) does not work for directories
+    assumeNotWindows();
     LOG.info("Running test to verify failure of fullyDeleteContents()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));

TestFsShellCopy.java

@@ -18,13 +18,13 @@
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -143,7 +143,7 @@ public class TestFsShellCopy {
   @Test
   public void testCopyFileFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String windowsTestRootPath = (new File(testRootDir.toUri().getPath()
         .toString())).getAbsolutePath();
     Path testRoot = new Path(windowsTestRootPath, "testPutFile");
@@ -158,7 +158,7 @@
   @Test
   public void testCopyDirFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String windowsTestRootPath = (new File(testRootDir.toUri().getPath()
         .toString())).getAbsolutePath();
     Path testRoot = new Path(windowsTestRootPath, "testPutDir");
@@ -485,7 +485,7 @@
   @Test
   public void testMoveFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     Path testRoot = new Path(testRootDir, "testPutFile");
     lfs.delete(testRoot, true);
     lfs.mkdirs(testRoot);
@@ -504,7 +504,7 @@
   @Test
   public void testGetWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String winDstFile = (new File(dstPath.toUri().getPath()
         .toString())).getAbsolutePath();
     shellRun(0, "-get", srcPath.toString(), winDstFile);

TestLocalDirAllocator.java

@@ -34,8 +34,8 @@ import org.junit.runners.Parameterized;
 import org.junit.runners.Parameterized.Parameters;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.*;
 
 /** This test LocalDirAllocator works correctly;
  * Every test case uses different buffer dirs to
@@ -57,8 +57,6 @@ public class TestLocalDirAllocator {
   final static private LocalDirAllocator dirAllocator =
     new LocalDirAllocator(CONTEXT);
   static LocalFileSystem localFs;
-  final static private boolean isWindows =
-    System.getProperty("os.name").startsWith("Windows");
   final static int SMALL_FILE_SIZE = 100;
   final static private String RELATIVE = "/RELATIVE";
   final static private String ABSOLUTE = "/ABSOLUTE";
@@ -132,7 +130,7 @@
    */
   @Test (timeout = 30000)
   public void test0() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);
     try {
@@ -154,7 +152,7 @@
    */
   @Test (timeout = 30000)
   public void testROBufferDirAndRWBufferDir() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir1 = buildBufferDir(ROOT, 1);
     String dir2 = buildBufferDir(ROOT, 2);
     try {
@@ -174,7 +172,7 @@
    */
   @Test (timeout = 30000)
   public void testDirsNotExist() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir2 = buildBufferDir(ROOT, 2);
     String dir3 = buildBufferDir(ROOT, 3);
     try {
@@ -200,7 +198,7 @@
    */
   @Test (timeout = 30000)
   public void testRWBufferDirBecomesRO() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir3 = buildBufferDir(ROOT, 3);
     String dir4 = buildBufferDir(ROOT, 4);
     try {
@@ -238,7 +236,7 @@
   static final int TRIALS = 100;
   @Test (timeout = 30000)
   public void testCreateManyFiles() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir5 = buildBufferDir(ROOT, 5);
     String dir6 = buildBufferDir(ROOT, 6);
     try {
@@ -348,7 +346,7 @@
    */
   @Test (timeout = 30000)
   public void testNoSideEffects() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
     String dir = buildBufferDir(ROOT, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -370,7 +368,7 @@
    */
   @Test (timeout = 30000)
   public void testGetLocalPathToRead() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
     String dir = buildBufferDir(ROOT, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -395,7 +393,7 @@
    */
   @Test (timeout = 30000)
   public void testGetAllLocalPathsToRead() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);

TestLocalFileSystem.java

@@ -21,7 +21,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 
 import static org.apache.hadoop.fs.FileSystemTestHelper.*;
@@ -31,8 +30,9 @@ import java.net.URI;
 import java.util.Arrays;
 import java.util.Random;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Mockito.*;
 
 import org.junit.After;
@@ -287,7 +287,7 @@ public class TestLocalFileSystem {
   @Test(timeout = 1000)
   public void testListStatusWithColons() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     File colonFile = new File(TEST_ROOT_DIR, "foo:bar");
     colonFile.mkdirs();
     FileStatus[] stats = fileSys.listStatus(new Path(TEST_ROOT_DIR));
@@ -298,7 +298,7 @@
   @Test
   public void testListStatusReturnConsistentPathOnWindows() throws IOException {
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     String dirNoDriveSpec = TEST_ROOT_DIR;
     if (dirNoDriveSpec.charAt(1) == ':')
       dirNoDriveSpec = dirNoDriveSpec.substring(2);

TestLocalFileSystemPermission.java

@@ -26,7 +26,10 @@ import org.apache.hadoop.util.Shell;
 import java.io.*;
 import java.util.*;
 
-import junit.framework.*;
+import org.junit.Test;
+
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -37,7 +40,7 @@ import static org.junit.Assert.assertThat;
 /**
  * This class tests the local file system via the FileSystem abstraction.
  */
-public class TestLocalFileSystemPermission extends TestCase {
+public class TestLocalFileSystemPermission {
   public static final Logger LOGGER =
       LoggerFactory.getLogger(TestFcLocalFsPermission.class);
@@ -71,11 +74,9 @@ public class TestLocalFileSystemPermission extends TestCase {
     assertTrue(!fs.exists(name));
   }
 
+  @Test
   public void testLocalFSDirsetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      LOGGER.info("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     LocalFileSystem localfs = FileSystem.getLocal(new Configuration());
     Configuration conf = localfs.getConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "044");
@@ -124,11 +125,9 @@
   }
 
   /** Test LocalFileSystem.setPermission */
+  @Test
   public void testLocalFSsetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      LOGGER.info("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     Configuration conf = new Configuration();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "044");
     LocalFileSystem localfs = FileSystem.getLocal(conf);
@@ -195,6 +194,7 @@
   }
 
   /** Test LocalFileSystem.setOwner. */
+  @Test
   public void testLocalFSsetOwner() throws IOException {
     if (Path.WINDOWS) {
       LOGGER.info("Cannot run test for Windows");
@@ -248,6 +248,7 @@
    * 5. For this directory we expect 715 as permission not 755
    * @throws Exception we can throw away all the exception.
    */
+  @Test
   public void testSetUmaskInRealTime() throws Exception {
     if (Path.WINDOWS) {
       LOGGER.info("Cannot run test for Windows");

TestPath.java

@@ -31,9 +31,17 @@ import org.apache.hadoop.util.Shell;
 import com.google.common.base.Joiner;
 
-import junit.framework.TestCase;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-public class TestPath extends TestCase {
+/**
+ * Test Hadoop Filesystem Paths.
+ */
+public class TestPath {
   /**
    * Merge a bunch of Path objects into a sorted semicolon-separated
    * path string.
@@ -242,9 +250,7 @@
   /** Test that Windows paths are correctly handled */
   @Test (timeout = 5000)
   public void testWindowsPaths() throws URISyntaxException, IOException {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar");
     assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar");
@@ -255,9 +261,7 @@
   /** Test invalid paths on Windows are correctly rejected */
   @Test (timeout = 5000)
   public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     String [] invalidPaths = {
         "hdfs:\\\\\\tmp"
@@ -401,7 +405,7 @@
   @Test (timeout = 30000)
   public void testGlobEscapeStatus() throws Exception {
     // This test is not meaningful on Windows where * is disallowed in file name.
-    if (Shell.WINDOWS) return;
+    assumeNotWindows();
     FileSystem lfs = FileSystem.getLocal(new Configuration());
     Path testRoot = lfs.makeQualified(
         new Path(GenericTestUtils.getTempPath("testPathGlob")));
@@ -493,7 +497,7 @@
   @Test (timeout = 30000)
   public void testIsWindowsAbsolutePath() {
-    if (!Shell.WINDOWS) return;
+    assumeWindows();
     assertTrue(Path.isWindowsAbsolutePath("C:\\test", false));
     assertTrue(Path.isWindowsAbsolutePath("C:/test", false));
    assertTrue(Path.isWindowsAbsolutePath("/C:/test", true));

TestSymlinkLocalFS.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -29,7 +30,6 @@ import java.io.IOException;
 import java.net.URI;
 import java.net.URISyntaxException;
 
-import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Test;
@@ -71,37 +71,37 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest {
   @Override
   public void testCreateDanglingLink() throws IOException {
     // Dangling symlinks are not supported on Windows local file system.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testCreateDanglingLink();
   }
 
   @Override
   public void testCreateFileViaDanglingLinkParent() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testCreateFileViaDanglingLinkParent();
   }
 
   @Override
   public void testOpenResolvesLinks() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testOpenResolvesLinks();
   }
 
   @Override
   public void testRecursiveLinks() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testRecursiveLinks();
   }
 
   @Override
   public void testRenameDirToDanglingSymlink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testRenameDirToDanglingSymlink();
   }
 
   @Override
   public void testStatDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testStatDanglingLink();
   }
@@ -126,7 +126,7 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest {
   @Test(timeout=1000)
   /** Stat and lstat a dangling link */
   public void testDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     Path fileAbs = new Path(testBaseDir1()+"/file");
     Path fileQual = new Path(testURI().toString(), fileAbs);
     Path link = new Path(testBaseDir1()+"/linkToFile");
@@ -235,7 +235,7 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest {
   @Override
   public void testSetTimesDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testSetTimesDanglingLink();
   }
 }

TestSymlinkLocalFSFileContext.java

@@ -17,12 +17,11 @@
  */
 package org.apache.hadoop.fs;
 
-import org.apache.hadoop.util.Shell;
 import org.junit.BeforeClass;
 
 import java.io.IOException;
 
-import static org.junit.Assume.assumeTrue;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 
 public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS {
@@ -34,7 +33,7 @@ public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS {
   @Override
   public void testRenameFileWithDestParentSymlink() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     super.testRenameFileWithDestParentSymlink();
   }
 }

TestSymlinkLocalFSFileSystem.java

@@ -22,14 +22,13 @@ import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Options.Rename;
-import org.apache.hadoop.util.Shell;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 
 public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS {
@@ -64,7 +63,7 @@ public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS {
   @Override
   public void testRenameFileWithDestParentSymlink() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     super.testRenameFileWithDestParentSymlink();
   }

TestSFTPFileSystem.java

@@ -30,7 +30,6 @@ import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 
 import org.apache.sshd.SshServer;
 import org.apache.sshd.common.NamedFactory;
@@ -48,8 +47,8 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.TestName;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 public class TestSFTPFileSystem {
@@ -99,7 +98,7 @@ public class TestSFTPFileSystem {
   @BeforeClass
   public static void setUp() throws Exception {
     // skip all tests if running on Windows
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     startSshdServer();

TestPathData.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.shell;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -126,9 +127,7 @@ public class TestPathData {
   @Test (timeout = 5000)
   public void testToFileRawWindowsPaths() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     // Can we handle raw Windows paths? The files need not exist for
     // these tests to succeed.
@@ -155,9 +154,7 @@
   @Test (timeout = 5000)
   public void testInvalidWindowsPath() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     // Verify that the following invalid paths are rejected.
     String [] winPaths = {

TestNativeIO.java

@@ -42,6 +42,8 @@ import org.junit.Test;
 import static org.junit.Assume.*;
 import static org.junit.Assert.*;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
@@ -107,9 +109,7 @@ public class TestNativeIO {
    */
   @Test (timeout = 30000)
   public void testMultiThreadedFstat() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     final FileOutputStream fos = new FileOutputStream(
         new File(TEST_DIR, "testfstat"));
@@ -165,9 +165,7 @@
   @Test (timeout = 30000)
   public void testSetFilePointer() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     LOG.info("Set a file pointer on Windows");
     try {
@@ -212,9 +210,7 @@
   @Test (timeout = 30000)
   public void testCreateFile() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
     try {
@@ -255,9 +251,7 @@
   /** Validate access checks on Windows */
   @Test (timeout = 30000)
   public void testAccess() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     File testFile = new File(TEST_DIR, "testfileaccess");
     assertTrue(testFile.createNewFile());
@@ -331,9 +325,7 @@
   @Test (timeout = 30000)
   public void testOpenMissingWithoutCreate() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     LOG.info("Open a missing file without O_CREAT and it should fail");
     try {
@@ -348,9 +340,7 @@
   @Test (timeout = 30000)
   public void testOpenWithCreate() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     LOG.info("Test creating a file with O_CREAT");
     FileDescriptor fd = NativeIO.POSIX.open(
@@ -382,9 +372,7 @@
    */
   @Test (timeout = 30000)
   public void testFDDoesntLeak() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     for (int i = 0; i < 10000; i++) {
       FileDescriptor fd = NativeIO.POSIX.open(
@@ -403,9 +391,7 @@
    */
   @Test (timeout = 30000)
   public void testChmod() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     try {
       NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777);
@@ -428,9 +414,7 @@
   @Test (timeout = 30000)
   public void testPosixFadvise() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     FileInputStream fis = new FileInputStream("/dev/zero");
     try {
@@ -497,19 +481,13 @@
   @Test (timeout = 30000)
   public void testGetUserName() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
   }
 
   @Test (timeout = 30000)
   public void testGetGroupName() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
   }
@@ -647,8 +625,7 @@
   @Test (timeout=10000)
   public void testNativePosixConsts() {
-    assumeTrue("Native POSIX constants not required for Windows",
-      !Path.WINDOWS);
+    assumeNotWindows("Native POSIX constants not required for Windows");
     assertTrue("Native 0_RDONLY const not set", O_RDONLY >= 0);
     assertTrue("Native 0_WRONLY const not set", O_WRONLY >= 0);
     assertTrue("Native 0_RDWR const not set", O_RDWR >= 0);

TestDNS.java

@@ -30,15 +30,14 @@ import javax.naming.NameNotFoundException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 /**
  * Test host name and IP resolution and caching.
@@ -197,7 +196,7 @@ public class TestDNS {
    */
   @Test (timeout=60000)
   public void testLookupWithHostsFallback() throws Exception {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     final String oldHostname = changeDnsCachedHostname(DUMMY_HOSTNAME);
     try {

TestShellBasedIdMapping.java

@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.security;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -28,7 +28,6 @@ import java.io.OutputStream;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.security.ShellBasedIdMapping.PassThroughMap;
 import org.apache.hadoop.security.ShellBasedIdMapping.StaticMapping;
 import org.junit.Test;
@@ -87,7 +86,7 @@ public class TestShellBasedIdMapping {
   @Test
   public void testStaticMapping() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     Map<Integer, Integer> uidStaticMap = new PassThroughMap<Integer>();
     Map<Integer, Integer> gidStaticMap = new PassThroughMap<Integer>();
@@ -129,7 +128,7 @@
   // Test staticMap refreshing
   @Test
   public void testStaticMapUpdate() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     File tempStaticMapFile = File.createTempFile("nfs-", ".map");
     tempStaticMapFile.delete();
     Configuration conf = new Configuration();
@@ -207,7 +206,7 @@
   @Test
   public void testDuplicates() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     String GET_ALL_USERS_CMD = "echo \"root:x:0:0:root:/root:/bin/bash\n"
         + "hdfs:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\n"
         + "hdfs:x:11502:10788:Grid Distributed File System:/home/hdfs:/bin/bash\n"
@@ -247,7 +246,7 @@
   @Test
   public void testIdOutOfIntegerRange() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     String GET_ALL_USERS_CMD = "echo \""
         + "nfsnobody:x:4294967294:4294967294:Anonymous NFS User:/var/lib/nfs:/sbin/nologin\n"
         + "nfsnobody1:x:4294967295:4294967295:Anonymous NFS User:/var/lib/nfs1:/sbin/nologin\n"

PlatformAssumptions.java (new file)

@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.junit.internal.AssumptionViolatedException;
+
+/**
+ * JUnit assumptions for the environment (OS).
+ */
+public final class PlatformAssumptions {
+  public static final String OS_NAME = System.getProperty("os.name");
+  public static final boolean WINDOWS = OS_NAME.startsWith("Windows");
+
+  private PlatformAssumptions() { }
+
+  public static void assumeNotWindows() {
+    assumeNotWindows("Expected Unix-like platform but got " + OS_NAME);
+  }
+
+  public static void assumeNotWindows(String message) {
+    if (WINDOWS) {
+      throw new AssumptionViolatedException(message);
+    }
+  }
+
+  public static void assumeWindows() {
+    if (!WINDOWS) {
+      throw new AssumptionViolatedException(
+          "Expected Windows platform but got " + OS_NAME);
+    }
+  }
+}
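For reference, a minimal sketch of how a test consumes these helpers; throwing AssumptionViolatedException directly lets the helper attach a descriptive message that includes the actual os.name. The class and method names below are illustrative, not part of this commit:

import org.junit.Test;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;

public class PlatformAssumptionsUsageExample {
  @Test
  public void posixOnlyTest() {
    // Skipped (not failed) when the JVM reports a Windows os.name;
    // the message overload documents why the test needs a POSIX platform.
    assumeNotWindows("requires POSIX file permissions");
    // ... POSIX-specific assertions ...
  }

  @Test
  public void windowsOnlyTest() {
    // Skipped on everything except Windows.
    assumeWindows();
    // ... Windows-specific assertions ...
  }
}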

TestWinUtils.java

@@ -18,8 +18,8 @@
 package org.apache.hadoop.util;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 import static org.junit.matchers.JUnitMatchers.containsString;
 
 import java.io.File;
@@ -53,7 +53,7 @@ public class TestWinUtils {
   @Before
   public void setUp() throws IOException {
     // Not supported on non-Windows platforms
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     TEST_DIR.mkdirs();
     assertTrue("Failed to create Test directory " + TEST_DIR,
         TEST_DIR.isDirectory() );

TestGlobPaths.java

@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
 
 import java.io.IOException;
-import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.UUID;
 import java.util.regex.Pattern;
@@ -495,7 +495,7 @@ public class TestGlobPaths {
   public void pTestEscape() throws IOException {
     // Skip the test case on Windows because backslash will be treated as a
     // path separator instead of an escaping character on Windows.
-    org.junit.Assume.assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     try {
       String [] files = new String[] {USER_DIR+"/ab\\[c.d"};
       Path[] matchedPath = prepareTesting(USER_DIR+"/ab\\[c.d", files);

TestDFSUtil.java

@@ -34,6 +34,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYPASSWORD_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY;
 import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.not;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -73,9 +74,7 @@ import org.apache.hadoop.security.alias.CredentialProvider;
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
-import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
@@ -800,7 +799,7 @@ public class TestDFSUtil {
   @Test (timeout=15000)
   public void testLocalhostReverseLookup() {
     // 127.0.0.1 -> localhost reverse resolution does not happen on Windows.
-    Assume.assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
 
     // Make sure when config FS_DEFAULT_NAME_KEY using IP address,
     // it will automatically convert it to hostname

TestBalancer.java

@@ -43,10 +43,10 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIPAL_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -451,7 +451,7 @@ public class TestBalancer {
     // This test assumes stick-bit based block pin mechanism available only
     // in Linux/Unix. It can be unblocked on Windows when HDFS-7759 is ready to
     // provide a different mechanism for Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     final Configuration conf = new HdfsConfiguration();
     initConf(conf);

TestRBWBlockInvalidation.java

@@ -17,9 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -70,7 +69,7 @@ public class TestRBWBlockInvalidation {
       throws IOException, InterruptedException {
     // This test cannot pass on Windows due to file locking enforcement. It will
     // reject the attempt to delete the block file from the RBW folder.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     Configuration conf = new HdfsConfiguration();
     conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2);

TestDataNodeHotSwapVolumes.java

@@ -72,6 +72,7 @@ import org.mockito.invocation.InvocationOnMock;
 import org.mockito.stubbing.Answer;
 
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.anyOf;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.not;
@@ -81,7 +82,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doAnswer;
@@ -784,7 +784,7 @@ public class TestDataNodeHotSwapVolumes {
       ReconfigurationException {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     startDFSCluster(1, 2);
     createFile(new Path("/test"), 32, (short)2);

TestDataNodeVolumeFailure.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -24,7 +25,6 @@ import static org.junit.Assert.assertNotEquals;
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -219,7 +219,7 @@ public class TestDataNodeVolumeFailure {
       throws InterruptedException, IOException, TimeoutException {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     Path file1 = new Path("/test1");
     DFSTestUtil.createFile(fs, file1, 1024, (short) 2, 1L);
@@ -384,7 +384,7 @@
   public void testUnderReplicationAfterVolFailure() throws Exception {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     // Bring up one more datanode
     cluster.startDataNodes(conf, 1, true, null, null);

TestDataNodeVolumeFailureReporting.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.datanode;
 import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
 import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -26,7 +27,6 @@ import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.util.ArrayList;
@@ -82,7 +82,7 @@ public class TestDataNodeVolumeFailureReporting {
   public void setUp() throws Exception {
     // These tests use DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     // Allow a single volume failure (there are two volumes)
     initCluster(1, 2, 1);
   }

TestDataNodeVolumeFailureToleration.java

@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -91,7 +91,7 @@ public class TestDataNodeVolumeFailureToleration {
    */
   @Test
   public void testValidVolumesAtStartup() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
 
     // Make sure no DNs are running.
     cluster.shutdownDataNodes();
@@ -139,7 +139,7 @@
    */
   @Test
   public void testConfigureMinValidVolumes() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
 
     // Bring up two additional datanodes that need both of their volumes
     // functioning in order to stay up.
@@ -218,7 +218,7 @@
   private void testVolumeConfig(int volumesTolerated, int volumesFailed,
       boolean expectedBPServiceState, boolean manageDfsDirs)
       throws IOException, InterruptedException {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
     final int dnIndex = 0;
     // Fail the current directory since invalid storage directory perms
     // get fixed up automatically on datanode startup.
@@ -272,7 +272,7 @@
    */
   @Test
   public void testFailedVolumeOnStartupIsCounted() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
     final DatanodeManager dm = cluster.getNamesystem().getBlockManager(
         ).getDatanodeManager();
     long origCapacity = DFSTestUtil.getLiveDatanodeCapacity(dm);

View File

@ -17,6 +17,7 @@
*/ */
package org.apache.hadoop.hdfs.server.datanode; package org.apache.hadoop.hdfs.server.datanode;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import static org.junit.Assume.assumeTrue; import static org.junit.Assume.assumeTrue;
import java.io.File; import java.io.File;
@@ -96,7 +97,8 @@ public class TestFsDatasetCacheRevocation {
*/ */
@Test(timeout=120000) @Test(timeout=120000)
public void testPinning() throws Exception { public void testPinning() throws Exception {
assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS); assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
assumeNotWindows();
Configuration conf = getDefaultConf(); Configuration conf = getDefaultConf();
// Set a really long revocation timeout, so that we won't reach it during // Set a really long revocation timeout, so that we won't reach it during
// this test. // this test.
@@ -146,7 +148,8 @@ public class TestFsDatasetCacheRevocation {
*/ */
@Test(timeout=120000) @Test(timeout=120000)
public void testRevocation() throws Exception { public void testRevocation() throws Exception {
assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS); assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
assumeNotWindows();
BlockReaderTestUtil.enableHdfsCachingTracing(); BlockReaderTestUtil.enableHdfsCachingTracing();
BlockReaderTestUtil.enableShortCircuitShmTracing(); BlockReaderTestUtil.enableShortCircuitShmTracing();
Configuration conf = getDefaultConf(); Configuration conf = getDefaultConf();

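The two hunks above also split the compound assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS) into separate checks. One reason to prefer the split form: when a compound assumption fails, the skip report cannot say which precondition was unmet. A self-contained illustration under that reading — the class and method names here are hypothetical, and the message overload of assumeTrue requires JUnit 4.11 or later:

import static org.junit.Assume.assumeTrue;

import org.junit.Test;

public class AssumptionSplitExample {
  // Stand-in for NativeCodeLoader.isNativeCodeLoaded(); hypothetical.
  private static boolean nativeLoaded() { return false; }
  private static final boolean WINDOWS =
      System.getProperty("os.name").startsWith("Windows");

  @Test
  public void combinedPrecondition() {
    // Skipped, but the report cannot say which condition failed.
    assumeTrue(nativeLoaded() && !WINDOWS);
  }

  @Test
  public void splitPreconditions() {
    // Each check carries its own message, so the skip reason is precise.
    assumeTrue("native code not loaded", nativeLoaded());
    assumeTrue("not supported on Windows", !WINDOWS);
  }
}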
View File

@@ -42,6 +42,7 @@ import java.util.concurrent.TimeoutException;
import static org.apache.hadoop.fs.StorageType.DEFAULT; import static org.apache.hadoop.fs.StorageType.DEFAULT;
import static org.apache.hadoop.fs.StorageType.RAM_DISK; import static org.apache.hadoop.fs.StorageType.RAM_DISK;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.core.Is.is; import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertThat; import static org.junit.Assert.assertThat;
@@ -61,8 +62,8 @@ public class TestScrLazyPersistFiles extends LazyPersistTestCase {
@Before @Before
public void before() { public void before() {
Assume.assumeThat(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS, Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
equalTo(true)); assumeNotWindows();
Assume.assumeThat(DomainSocket.getLoadingFailureReason(), equalTo(null)); Assume.assumeThat(DomainSocket.getLoadingFailureReason(), equalTo(null));
final long osPageSize = NativeIO.POSIX.getCacheManipulator().getOperatingSystemPageSize(); final long osPageSize = NativeIO.POSIX.getCacheManipulator().getOperatingSystemPageSize();

View File

@@ -17,11 +17,11 @@
*/ */
package org.apache.hadoop.hdfs.util; package org.apache.hadoop.hdfs.util;
import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
import static org.junit.Assume.assumeTrue;
import java.io.File; import java.io.File;
import java.io.FileNotFoundException; import java.io.FileNotFoundException;
@@ -33,7 +33,6 @@ import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.test.PathUtils; import org.apache.hadoop.test.PathUtils;
import org.apache.hadoop.util.Shell;
import org.junit.Before; import org.junit.Before;
import org.junit.Rule; import org.junit.Rule;
import org.junit.Test; import org.junit.Test;
@@ -129,7 +128,7 @@ public class TestAtomicFileOutputStream {
@Test @Test
public void testFailToRename() throws IOException { public void testFailToRename() throws IOException {
assumeTrue(Shell.WINDOWS); assumeWindows();
OutputStream fos = null; OutputStream fos = null;
try { try {
fos = new AtomicFileOutputStream(DST_FILE); fos = new AtomicFileOutputStream(DST_FILE);

View File

@@ -17,6 +17,7 @@
*/ */
package org.apache.hadoop.tracing; package org.apache.hadoop.tracing;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import static org.junit.Assume.assumeTrue; import static org.junit.Assume.assumeTrue;
import java.io.File; import java.io.File;
@@ -24,7 +25,6 @@ import java.io.IOException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsTracer; import org.apache.hadoop.fs.FsTracer;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -63,7 +63,8 @@ public class TestTracingShortCircuitLocalRead {
@Test @Test
public void testShortCircuitTraceHooks() throws IOException { public void testShortCircuitTraceHooks() throws IOException {
assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS); assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
assumeNotWindows();
conf = new Configuration(); conf = new Configuration();
conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX + conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
Tracer.SPAN_RECEIVER_CLASSES_KEY, Tracer.SPAN_RECEIVER_CLASSES_KEY,

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.mapred; package org.apache.hadoop.mapred;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNotNull;
@@ -65,7 +66,6 @@ import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.Shell;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol; import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -586,9 +586,7 @@ public class TestYARNRunner {
// the Windows behavior is different and this test currently doesn't really // the Windows behavior is different and this test currently doesn't really
// apply // apply
// MAPREDUCE-6588 should revisit this test // MAPREDUCE-6588 should revisit this test
if (Shell.WINDOWS) { assumeNotWindows();
return;
}
final String ADMIN_LIB_PATH = "foo"; final String ADMIN_LIB_PATH = "foo";
final String USER_LIB_PATH = "bar"; final String USER_LIB_PATH = "bar";

View File

@@ -18,7 +18,7 @@
package org.apache.hadoop.fs.azure; package org.apache.hadoop.fs.azure;
import static org.junit.Assume.assumeTrue; import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import org.apache.hadoop.fs.FSMainOperationsBaseTest; import org.apache.hadoop.fs.FSMainOperationsBaseTest;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@@ -48,7 +48,7 @@ public class TestNativeAzureFileSystemOperationsMocked extends
System.out System.out
.println("Skipping testListStatusThrowsExceptionForUnreadableDir since WASB" .println("Skipping testListStatusThrowsExceptionForUnreadableDir since WASB"
+ " doesn't honor directory permissions."); + " doesn't honor directory permissions.");
assumeTrue(!Path.WINDOWS); assumeNotWindows();
} }
@Override @Override

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.fs.azure; package org.apache.hadoop.fs.azure;
import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import java.io.File; import java.io.File;
@@ -26,7 +27,6 @@ import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.Shell;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
@@ -38,9 +38,7 @@ public class TestShellDecryptionKeyProvider {
@Test @Test
public void testScriptPathNotSpecified() throws Exception { public void testScriptPathNotSpecified() throws Exception {
if (!Shell.WINDOWS) { assumeWindows();
return;
}
ShellDecryptionKeyProvider provider = new ShellDecryptionKeyProvider(); ShellDecryptionKeyProvider provider = new ShellDecryptionKeyProvider();
Configuration conf = new Configuration(); Configuration conf = new Configuration();
String account = "testacct"; String account = "testacct";
@@ -58,9 +56,7 @@ public class TestShellDecryptionKeyProvider {
@Test @Test
public void testValidScript() throws Exception { public void testValidScript() throws Exception {
if (!Shell.WINDOWS) { assumeWindows();
return;
}
String expectedResult = "decretedKey"; String expectedResult = "decretedKey";
// Create a simple script which echoes the given key plus the given // Create a simple script which echoes the given key plus the given

View File

@@ -20,10 +20,10 @@ package org.apache.hadoop.yarn.util;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.util.Shell;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
public class TestWindowsBasedProcessTree { public class TestWindowsBasedProcessTree {
@@ -45,10 +45,7 @@ public class TestWindowsBasedProcessTree {
@Test (timeout = 30000) @Test (timeout = 30000)
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
public void tree() { public void tree() {
if( !Shell.WINDOWS) { assumeWindows();
LOG.info("Platform not Windows. Not testing");
return;
}
assertTrue("WindowsBasedProcessTree should be available on Windows", assertTrue("WindowsBasedProcessTree should be available on Windows",
WindowsBasedProcessTree.isAvailable()); WindowsBasedProcessTree.isAvailable());
ControlledClock testClock = new ControlledClock(); ControlledClock testClock = new ControlledClock();

View File

@@ -31,8 +31,8 @@ import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
import org.junit.Assert; import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import static org.junit.Assume.assumeTrue;
@SuppressWarnings("deprecation") @SuppressWarnings("deprecation")
public class TestContainerExecutor { public class TestContainerExecutor {
@@ -80,8 +80,7 @@ public class TestContainerExecutor {
@Test (timeout = 5000) @Test (timeout = 5000)
public void testRunCommandWithNoResources() { public void testRunCommandWithNoResources() {
// Windows only test assumeWindows();
assumeTrue(Shell.WINDOWS);
Configuration conf = new Configuration(); Configuration conf = new Configuration();
String[] command = containerExecutor.getRunCommand("echo", "group1", null, null, String[] command = containerExecutor.getRunCommand("echo", "group1", null, null,
conf, Resource.newInstance(1024, 1)); conf, Resource.newInstance(1024, 1));
@@ -93,8 +92,7 @@ public class TestContainerExecutor {
@Test (timeout = 5000) @Test (timeout = 5000)
public void testRunCommandWithMemoryOnlyResources() { public void testRunCommandWithMemoryOnlyResources() {
// Windows only test assumeWindows();
assumeTrue(Shell.WINDOWS);
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_MEMORY_LIMIT_ENABLED, "true"); conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_MEMORY_LIMIT_ENABLED, "true");
String[] command = containerExecutor.getRunCommand("echo", "group1", null, null, String[] command = containerExecutor.getRunCommand("echo", "group1", null, null,
@@ -107,8 +105,7 @@ public class TestContainerExecutor {
@Test (timeout = 5000) @Test (timeout = 5000)
public void testRunCommandWithCpuAndMemoryResources() { public void testRunCommandWithCpuAndMemoryResources() {
// Windows only test assumeWindows();
assumeTrue(Shell.WINDOWS);
int containerCores = 1; int containerCores = 1;
Configuration conf = new Configuration(); Configuration conf = new Configuration();
conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_CPU_LIMIT_ENABLED, "true"); conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_CPU_LIMIT_ENABLED, "true");

View File

@@ -18,9 +18,9 @@
package org.apache.hadoop.yarn.server.nodemanager; package org.apache.hadoop.yarn.server.nodemanager;
import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue; import static org.junit.Assert.assertTrue;
import static org.junit.Assume.assumeTrue;
import static org.mockito.Matchers.any; import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock; import static org.mockito.Mockito.mock;
@@ -123,7 +123,7 @@ public class TestLinuxContainerExecutorWithMocks {
@Before @Before
public void setup() throws IOException, ContainerExecutionException { public void setup() throws IOException, ContainerExecutionException {
assumeTrue(!Path.WINDOWS); assumeNotWindows();
tmpMockExecutor = System.getProperty("test.build.data") + tmpMockExecutor = System.getProperty("test.build.data") +
"/tmp-mock-container-executor"; "/tmp-mock-container-executor";

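In the hunk above the assumption moves into the @Before method, as in the TestDataNodeVolumeFailureReporting change earlier. In JUnit 4 an assumption that fails during setup is recorded as a skip for every @Test method in the class, so one call covers the whole fixture. A small self-contained sketch of that behaviour — class and method names are illustrative:

import static org.junit.Assume.assumeTrue;

import org.junit.Before;
import org.junit.Test;

public class BeforeAssumptionExample {
  @Before
  public void setup() {
    // An AssumptionViolatedException raised here is caught by the runner
    // and recorded as a skipped test, not a failure or a silent pass.
    assumeTrue("Linux-only behaviour under test",
        !System.getProperty("os.name").startsWith("Windows"));
  }

  @Test
  public void linuxOnlyBehaviour() {
    // Never reached on Windows; shown as skipped in the test report.
  }
}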
View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher; package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher;
import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThat; import static org.junit.Assert.assertThat;
import static org.junit.Assert.fail; import static org.junit.Assert.fail;
@@ -408,7 +409,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
public void testPrependDistcache() throws Exception { public void testPrependDistcache() throws Exception {
// Test is only relevant on Windows // Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS); assumeWindows();
ContainerLaunchContext containerLaunchContext = ContainerLaunchContext containerLaunchContext =
recordFactory.newRecordInstance(ContainerLaunchContext.class); recordFactory.newRecordInstance(ContainerLaunchContext.class);
@@ -1129,7 +1130,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
String callCmd = "@call "; String callCmd = "@call ";
// Test is only relevant on Windows // Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS); assumeWindows();
// The tests are built on assuming 8191 max command line length // The tests are built on assuming 8191 max command line length
assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH); assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1177,7 +1178,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
@Test (timeout = 10000) @Test (timeout = 10000)
public void testWindowsShellScriptBuilderEnv() throws IOException { public void testWindowsShellScriptBuilderEnv() throws IOException {
// Test is only relevant on Windows // Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS); assumeWindows();
// The tests are built on assuming 8191 max command line length // The tests are built on assuming 8191 max command line length
assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH); assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1202,7 +1203,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
String mkDirCmd = "@if not exist \"\" mkdir \"\""; String mkDirCmd = "@if not exist \"\" mkdir \"\"";
// Test is only relevant on Windows // Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS); assumeWindows();
// The tests are built on assuming 8191 max command line length // The tests are built on assuming 8191 max command line length
assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH); assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1225,7 +1226,7 @@ public class TestContainerLaunch extends BaseContainerManagerTest {
@Test (timeout = 10000) @Test (timeout = 10000)
public void testWindowsShellScriptBuilderLink() throws IOException { public void testWindowsShellScriptBuilderLink() throws IOException {
// Test is only relevant on Windows // Test is only relevant on Windows
Assume.assumeTrue(Shell.WINDOWS); assumeWindows();
String linkCmd = "@" + Shell.getWinUtilsPath() + " symlink \"\" \"\""; String linkCmd = "@" + Shell.getWinUtilsPath() + " symlink \"\" \"\"";
// The tests are built on assuming 8191 max command line length // The tests are built on assuming 8191 max command line length