diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java index 5c44a985dd3..dff89f9669f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextPermissionBase.java @@ -34,6 +34,7 @@ import org.junit.Before; import org.junit.Test; import static org.apache.hadoop.fs.FileContextTestHelper.*; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.fail; @@ -98,10 +99,7 @@ public abstract class FileContextPermissionBase { @Test public void testCreatePermission() throws IOException { - if (Path.WINDOWS) { - System.out.println("Cannot run test for Windows"); - return; - } + assumeNotWindows(); String filename = "foo"; Path f = fileContextTestHelper.getTestRootPath(fc, filename); fileContextTestHelper.createFile(fc, filename); @@ -112,10 +110,7 @@ public abstract class FileContextPermissionBase { @Test public void testSetPermission() throws IOException { - if (Path.WINDOWS) { - System.out.println("Cannot run test for Windows"); - return; - } + assumeNotWindows(); String filename = "foo"; Path f = fileContextTestHelper.getTestRootPath(fc, filename); @@ -137,10 +132,7 @@ public abstract class FileContextPermissionBase { @Test public void testSetOwner() throws IOException { - if (Path.WINDOWS) { - System.out.println("Cannot run test for Windows"); - return; - } + assumeNotWindows(); String filename = "bar"; Path f = fileContextTestHelper.getTestRootPath(fc, filename); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java index 2116a4b7ca7..0b7519cf3ba 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFileUtil.java @@ -43,12 +43,12 @@ import java.util.zip.ZipOutputStream; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; import org.apache.tools.tar.TarEntry; import org.apache.tools.tar.TarOutputStream; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.*; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; @@ -423,10 +423,8 @@ public class TestFileUtil { @Test (timeout = 30000) public void testFailFullyDelete() throws IOException { - if(Shell.WINDOWS) { - // windows Dir.setWritable(false) does not work for directories - return; - } + // Windows Dir.setWritable(false) does not work for directories + assumeNotWindows(); LOG.info("Running test to verify failure of fullyDelete()"); setupDirsAndNonWritablePermissions(); boolean ret = FileUtil.fullyDelete(new MyFile(del)); @@ -504,10 +502,8 @@ public class TestFileUtil { @Test (timeout = 30000) public void testFailFullyDeleteContents() throws IOException { - if(Shell.WINDOWS) { - // windows Dir.setWritable(false) does not work for directories - return; - } + // Windows Dir.setWritable(false) does not work for directories + assumeNotWindows(); 
LOG.info("Running test to verify failure of fullyDeleteContents()"); setupDirsAndNonWritablePermissions(); boolean ret = FileUtil.fullyDeleteContents(new MyFile(del)); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java index 9e199ca7f84..6ca390508ac 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFsShellCopy.java @@ -18,13 +18,13 @@ package org.apache.hadoop.fs; +import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.IOException; @@ -143,7 +143,7 @@ public class TestFsShellCopy { @Test public void testCopyFileFromWindowsLocalPath() throws Exception { - assumeTrue(Path.WINDOWS); + assumeWindows(); String windowsTestRootPath = (new File(testRootDir.toUri().getPath() .toString())).getAbsolutePath(); Path testRoot = new Path(windowsTestRootPath, "testPutFile"); @@ -158,7 +158,7 @@ public class TestFsShellCopy { @Test public void testCopyDirFromWindowsLocalPath() throws Exception { - assumeTrue(Path.WINDOWS); + assumeWindows(); String windowsTestRootPath = (new File(testRootDir.toUri().getPath() .toString())).getAbsolutePath(); Path testRoot = new Path(windowsTestRootPath, "testPutDir"); @@ -485,7 +485,7 @@ public class TestFsShellCopy { @Test public void testMoveFromWindowsLocalPath() throws Exception { - assumeTrue(Path.WINDOWS); + assumeWindows(); Path testRoot = new Path(testRootDir, "testPutFile"); lfs.delete(testRoot, true); lfs.mkdirs(testRoot); @@ -504,7 +504,7 @@ public class TestFsShellCopy { @Test public void testGetWindowsLocalPath() throws Exception { - assumeTrue(Path.WINDOWS); + assumeWindows(); String winDstFile = (new File(dstPath.toUri().getPath() .toString())).getAbsolutePath(); shellRun(0, "-get", srcPath.toString(), winDstFile); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java index 8cbe28349d0..825efe046da 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalDirAllocator.java @@ -34,8 +34,8 @@ import org.junit.runners.Parameterized; import org.junit.runners.Parameterized.Parameters; import org.junit.Test; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.*; -import static org.junit.Assume.*; /** This test LocalDirAllocator works correctly; * Every test case uses different buffer dirs to @@ -57,8 +57,6 @@ public class TestLocalDirAllocator { final static private LocalDirAllocator dirAllocator = new LocalDirAllocator(CONTEXT); static LocalFileSystem localFs; - final static private boolean isWindows = - System.getProperty("os.name").startsWith("Windows"); final static int SMALL_FILE_SIZE = 100; final static private String RELATIVE = "/RELATIVE"; final static private String ABSOLUTE = 
"/ABSOLUTE"; @@ -132,7 +130,7 @@ public class TestLocalDirAllocator { */ @Test (timeout = 30000) public void test0() throws Exception { - if (isWindows) return; + assumeNotWindows(); String dir0 = buildBufferDir(ROOT, 0); String dir1 = buildBufferDir(ROOT, 1); try { @@ -154,7 +152,7 @@ public class TestLocalDirAllocator { */ @Test (timeout = 30000) public void testROBufferDirAndRWBufferDir() throws Exception { - if (isWindows) return; + assumeNotWindows(); String dir1 = buildBufferDir(ROOT, 1); String dir2 = buildBufferDir(ROOT, 2); try { @@ -174,7 +172,7 @@ public class TestLocalDirAllocator { */ @Test (timeout = 30000) public void testDirsNotExist() throws Exception { - if (isWindows) return; + assumeNotWindows(); String dir2 = buildBufferDir(ROOT, 2); String dir3 = buildBufferDir(ROOT, 3); try { @@ -200,7 +198,7 @@ public class TestLocalDirAllocator { */ @Test (timeout = 30000) public void testRWBufferDirBecomesRO() throws Exception { - if (isWindows) return; + assumeNotWindows(); String dir3 = buildBufferDir(ROOT, 3); String dir4 = buildBufferDir(ROOT, 4); try { @@ -238,7 +236,7 @@ public class TestLocalDirAllocator { static final int TRIALS = 100; @Test (timeout = 30000) public void testCreateManyFiles() throws Exception { - if (isWindows) return; + assumeNotWindows(); String dir5 = buildBufferDir(ROOT, 5); String dir6 = buildBufferDir(ROOT, 6); try { @@ -348,7 +346,7 @@ public class TestLocalDirAllocator { */ @Test (timeout = 30000) public void testNoSideEffects() throws IOException { - assumeTrue(!isWindows); + assumeNotWindows(); String dir = buildBufferDir(ROOT, 0); try { conf.set(CONTEXT, dir); @@ -370,7 +368,7 @@ public class TestLocalDirAllocator { */ @Test (timeout = 30000) public void testGetLocalPathToRead() throws IOException { - assumeTrue(!isWindows); + assumeNotWindows(); String dir = buildBufferDir(ROOT, 0); try { conf.set(CONTEXT, dir); @@ -395,7 +393,7 @@ public class TestLocalDirAllocator { */ @Test (timeout = 30000) public void testGetAllLocalPathsToRead() throws IOException { - assumeTrue(!isWindows); + assumeNotWindows(); String dir0 = buildBufferDir(ROOT, 0); String dir1 = buildBufferDir(ROOT, 1); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java index 3aadd2fb546..23113375e72 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystem.java @@ -21,7 +21,6 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem.Statistics; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.StringUtils; import static org.apache.hadoop.fs.FileSystemTestHelper.*; @@ -31,8 +30,9 @@ import java.net.URI; import java.util.Arrays; import java.util.Random; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; +import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import static org.junit.Assert.*; -import static org.junit.Assume.assumeTrue; import static org.mockito.Mockito.*; import org.junit.After; @@ -287,7 +287,7 @@ public class TestLocalFileSystem { @Test(timeout = 1000) public void testListStatusWithColons() throws IOException { - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); File colonFile = new 
File(TEST_ROOT_DIR, "foo:bar"); colonFile.mkdirs(); FileStatus[] stats = fileSys.listStatus(new Path(TEST_ROOT_DIR)); @@ -298,7 +298,7 @@ public class TestLocalFileSystem { @Test public void testListStatusReturnConsistentPathOnWindows() throws IOException { - assumeTrue(Shell.WINDOWS); + assumeWindows(); String dirNoDriveSpec = TEST_ROOT_DIR; if (dirNoDriveSpec.charAt(1) == ':') dirNoDriveSpec = dirNoDriveSpec.substring(2); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java index 11e94a78c8b..817285cc79a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFileSystemPermission.java @@ -26,7 +26,10 @@ import org.apache.hadoop.util.Shell; import java.io.*; import java.util.*; -import junit.framework.*; +import org.junit.Test; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertTrue; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -37,7 +40,7 @@ import static org.junit.Assert.assertThat; /** * This class tests the local file system via the FileSystem abstraction. */ -public class TestLocalFileSystemPermission extends TestCase { +public class TestLocalFileSystemPermission { public static final Logger LOGGER = LoggerFactory.getLogger(TestFcLocalFsPermission.class); @@ -71,11 +74,9 @@ public class TestLocalFileSystemPermission extends TestCase { assertTrue(!fs.exists(name)); } + @Test public void testLocalFSDirsetPermission() throws IOException { - if (Path.WINDOWS) { - LOGGER.info("Cannot run test for Windows"); - return; - } + assumeNotWindows(); LocalFileSystem localfs = FileSystem.getLocal(new Configuration()); Configuration conf = localfs.getConf(); conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "044"); @@ -124,11 +125,9 @@ public class TestLocalFileSystemPermission extends TestCase { } /** Test LocalFileSystem.setPermission */ + @Test public void testLocalFSsetPermission() throws IOException { - if (Path.WINDOWS) { - LOGGER.info("Cannot run test for Windows"); - return; - } + assumeNotWindows(); Configuration conf = new Configuration(); conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "044"); LocalFileSystem localfs = FileSystem.getLocal(conf); @@ -195,6 +194,7 @@ public class TestLocalFileSystemPermission extends TestCase { } /** Test LocalFileSystem.setOwner. */ + @Test public void testLocalFSsetOwner() throws IOException { if (Path.WINDOWS) { LOGGER.info("Cannot run test for Windows"); @@ -248,6 +248,7 @@ public class TestLocalFileSystemPermission extends TestCase { * 5. For this directory we expect 715 as permission not 755 * @throws Exception we can throw away all the exception. 
*/ + @Test public void testSetUmaskInRealTime() throws Exception { if (Path.WINDOWS) { LOGGER.info("Cannot run test for Windows"); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java index e5b22f95a38..dc48a103430 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java @@ -31,9 +31,17 @@ import org.apache.hadoop.util.Shell; import com.google.common.base.Joiner; -import junit.framework.TestCase; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; +import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.assertFalse; +import static org.junit.Assert.assertTrue; +import static org.junit.Assert.fail; -public class TestPath extends TestCase { +/** + * Test Hadoop Filesystem Paths. + */ +public class TestPath { /** * Merge a bunch of Path objects into a sorted semicolon-separated * path string. @@ -242,9 +250,7 @@ public class TestPath extends TestCase { /** Test that Windows paths are correctly handled */ @Test (timeout = 5000) public void testWindowsPaths() throws URISyntaxException, IOException { - if (!Path.WINDOWS) { - return; - } + assumeWindows(); assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar"); assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar"); @@ -255,9 +261,7 @@ public class TestPath extends TestCase { /** Test invalid paths on Windows are correctly rejected */ @Test (timeout = 5000) public void testInvalidWindowsPaths() throws URISyntaxException, IOException { - if (!Path.WINDOWS) { - return; - } + assumeWindows(); String [] invalidPaths = { "hdfs:\\\\\\tmp" @@ -401,7 +405,7 @@ public class TestPath extends TestCase { @Test (timeout = 30000) public void testGlobEscapeStatus() throws Exception { // This test is not meaningful on Windows where * is disallowed in file name. 
- if (Shell.WINDOWS) return; + assumeNotWindows(); FileSystem lfs = FileSystem.getLocal(new Configuration()); Path testRoot = lfs.makeQualified( new Path(GenericTestUtils.getTempPath("testPathGlob"))); @@ -493,7 +497,7 @@ public class TestPath extends TestCase { @Test (timeout = 30000) public void testIsWindowsAbsolutePath() { - if (!Shell.WINDOWS) return; + assumeWindows(); assertTrue(Path.isWindowsAbsolutePath("C:\\test", false)); assertTrue(Path.isWindowsAbsolutePath("C:/test", false)); assertTrue(Path.isWindowsAbsolutePath("/C:/test", true)); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java index 8968e7ad167..0a51b659290 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFS.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.fs; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -29,7 +30,6 @@ import java.io.IOException; import java.net.URI; import java.net.URISyntaxException; -import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.util.Shell; import org.apache.hadoop.security.UserGroupInformation; import org.junit.Test; @@ -71,37 +71,37 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest { @Override public void testCreateDanglingLink() throws IOException { // Dangling symlinks are not supported on Windows local file system. - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); super.testCreateDanglingLink(); } @Override public void testCreateFileViaDanglingLinkParent() throws IOException { - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); super.testCreateFileViaDanglingLinkParent(); } @Override public void testOpenResolvesLinks() throws IOException { - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); super.testOpenResolvesLinks(); } @Override public void testRecursiveLinks() throws IOException { - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); super.testRecursiveLinks(); } @Override public void testRenameDirToDanglingSymlink() throws IOException { - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); super.testRenameDirToDanglingSymlink(); } @Override public void testStatDanglingLink() throws IOException { - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); super.testStatDanglingLink(); } @@ -126,7 +126,7 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest { @Test(timeout=1000) /** Stat and lstat a dangling link */ public void testDanglingLink() throws IOException { - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); Path fileAbs = new Path(testBaseDir1()+"/file"); Path fileQual = new Path(testURI().toString(), fileAbs); Path link = new Path(testBaseDir1()+"/linkToFile"); @@ -235,7 +235,7 @@ abstract public class TestSymlinkLocalFS extends SymlinkBaseTest { @Override public void testSetTimesDanglingLink() throws IOException { - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); super.testSetTimesDanglingLink(); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java index 7f506c6295e..301bf046cd2 100644 --- 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileContext.java @@ -17,12 +17,11 @@ */ package org.apache.hadoop.fs; -import org.apache.hadoop.util.Shell; import org.junit.BeforeClass; import java.io.IOException; -import static org.junit.Assume.assumeTrue; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS { @@ -34,7 +33,7 @@ public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS { @Override public void testRenameFileWithDestParentSymlink() throws IOException { - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); super.testRenameFileWithDestParentSymlink(); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java index a945ddd22bb..6fc2d62d8c1 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestSymlinkLocalFSFileSystem.java @@ -22,14 +22,13 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Options.Rename; -import org.apache.hadoop.util.Shell; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Test; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.junit.Assume.assumeTrue; public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS { @@ -64,7 +63,7 @@ public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS { @Override public void testRenameFileWithDestParentSymlink() throws IOException { - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); super.testRenameFileWithDestParentSymlink(); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java index 36aaceed11c..8dc53244f3b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/sftp/TestSFTPFileSystem.java @@ -30,7 +30,6 @@ import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.hadoop.util.Shell; import org.apache.sshd.SshServer; import org.apache.sshd.common.NamedFactory; @@ -48,8 +47,8 @@ import org.junit.Rule; import org.junit.Test; import org.junit.rules.TestName; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.*; -import static org.junit.Assume.assumeTrue; public class TestSFTPFileSystem { @@ -99,7 +98,7 @@ public class TestSFTPFileSystem { @BeforeClass public static void setUp() throws Exception { // skip all tests if running on Windows - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); startSshdServer(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java index e3e574afe45..f2656e66cf6 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/TestPathData.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.fs.shell; +import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; @@ -126,9 +127,7 @@ public class TestPathData { @Test (timeout = 5000) public void testToFileRawWindowsPaths() throws Exception { - if (!Path.WINDOWS) { - return; - } + assumeWindows(); // Can we handle raw Windows paths? The files need not exist for // these tests to succeed. @@ -155,9 +154,7 @@ public class TestPathData { @Test (timeout = 5000) public void testInvalidWindowsPath() throws Exception { - if (!Path.WINDOWS) { - return; - } + assumeWindows(); // Verify that the following invalid paths are rejected. String [] winPaths = { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java index e6f25dc2eea..b2445a33312 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/nativeio/TestNativeIO.java @@ -42,6 +42,8 @@ import org.junit.Test; import static org.junit.Assume.*; import static org.junit.Assert.*; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; +import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import org.apache.commons.io.FileUtils; import org.apache.commons.logging.Log; @@ -107,9 +109,7 @@ public class TestNativeIO { */ @Test (timeout = 30000) public void testMultiThreadedFstat() throws Exception { - if (Path.WINDOWS) { - return; - } + assumeNotWindows(); final FileOutputStream fos = new FileOutputStream( new File(TEST_DIR, "testfstat")); @@ -165,9 +165,7 @@ public class TestNativeIO { @Test (timeout = 30000) public void testSetFilePointer() throws Exception { - if (!Path.WINDOWS) { - return; - } + assumeWindows(); LOG.info("Set a file pointer on Windows"); try { @@ -212,9 +210,7 @@ public class TestNativeIO { @Test (timeout = 30000) public void testCreateFile() throws Exception { - if (!Path.WINDOWS) { - return; - } + assumeWindows(); LOG.info("Open a file on Windows with SHARE_DELETE shared mode"); try { @@ -255,9 +251,7 @@ public class TestNativeIO { /** Validate access checks on Windows */ @Test (timeout = 30000) public void testAccess() throws Exception { - if (!Path.WINDOWS) { - return; - } + assumeWindows(); File testFile = new File(TEST_DIR, "testfileaccess"); assertTrue(testFile.createNewFile()); @@ -331,9 +325,7 @@ public class TestNativeIO { @Test (timeout = 30000) public void testOpenMissingWithoutCreate() throws Exception { - if (Path.WINDOWS) { - return; - } + assumeNotWindows(); LOG.info("Open a missing file without O_CREAT and it should fail"); try { @@ -348,9 +340,7 @@ public class TestNativeIO { @Test (timeout = 30000) public void testOpenWithCreate() throws Exception { - if (Path.WINDOWS) { - return; - } + assumeNotWindows(); LOG.info("Test creating a file with O_CREAT"); FileDescriptor fd = NativeIO.POSIX.open( @@ -382,9 +372,7 @@ public class 
TestNativeIO { */ @Test (timeout = 30000) public void testFDDoesntLeak() throws IOException { - if (Path.WINDOWS) { - return; - } + assumeNotWindows(); for (int i = 0; i < 10000; i++) { FileDescriptor fd = NativeIO.POSIX.open( @@ -403,9 +391,7 @@ public class TestNativeIO { */ @Test (timeout = 30000) public void testChmod() throws Exception { - if (Path.WINDOWS) { - return; - } + assumeNotWindows(); try { NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777); @@ -428,9 +414,7 @@ public class TestNativeIO { @Test (timeout = 30000) public void testPosixFadvise() throws Exception { - if (Path.WINDOWS) { - return; - } + assumeNotWindows(); FileInputStream fis = new FileInputStream("/dev/zero"); try { @@ -497,19 +481,13 @@ public class TestNativeIO { @Test (timeout = 30000) public void testGetUserName() throws IOException { - if (Path.WINDOWS) { - return; - } - + assumeNotWindows(); assertFalse(NativeIO.POSIX.getUserName(0).isEmpty()); } @Test (timeout = 30000) public void testGetGroupName() throws IOException { - if (Path.WINDOWS) { - return; - } - + assumeNotWindows(); assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty()); } @@ -647,8 +625,7 @@ public class TestNativeIO { @Test (timeout=10000) public void testNativePosixConsts() { - assumeTrue("Native POSIX constants not required for Windows", - !Path.WINDOWS); + assumeNotWindows("Native POSIX constants not required for Windows"); assertTrue("Native 0_RDONLY const not set", O_RDONLY >= 0); assertTrue("Native 0_WRONLY const not set", O_WRONLY >= 0); assertTrue("Native 0_RDWR const not set", O_RDWR >= 0); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java index a0bfe73f9ae..863d380ac3b 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/net/TestDNS.java @@ -30,15 +30,14 @@ import javax.naming.NameNotFoundException; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.util.Time; import org.junit.Test; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.hamcrest.CoreMatchers.not; import static org.hamcrest.core.Is.is; import static org.junit.Assert.*; -import static org.junit.Assume.assumeTrue; /** * Test host name and IP resolution and caching. 
@@ -197,7 +196,7 @@ public class TestDNS { */ @Test (timeout=60000) public void testLookupWithHostsFallback() throws Exception { - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); final String oldHostname = changeDnsCachedHostname(DUMMY_HOSTNAME); try { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java index e3952475691..d589c3a3467 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestShellBasedIdMapping.java @@ -17,9 +17,9 @@ */ package org.apache.hadoop.security; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.FileOutputStream; @@ -28,7 +28,6 @@ import java.io.OutputStream; import java.util.Map; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.security.ShellBasedIdMapping.PassThroughMap; import org.apache.hadoop.security.ShellBasedIdMapping.StaticMapping; import org.junit.Test; @@ -87,7 +86,7 @@ public class TestShellBasedIdMapping { @Test public void testStaticMapping() throws IOException { - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); Map uidStaticMap = new PassThroughMap(); Map gidStaticMap = new PassThroughMap(); @@ -129,7 +128,7 @@ public class TestShellBasedIdMapping { // Test staticMap refreshing @Test public void testStaticMapUpdate() throws IOException { - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); File tempStaticMapFile = File.createTempFile("nfs-", ".map"); tempStaticMapFile.delete(); Configuration conf = new Configuration(); @@ -207,7 +206,7 @@ public class TestShellBasedIdMapping { @Test public void testDuplicates() throws IOException { - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); String GET_ALL_USERS_CMD = "echo \"root:x:0:0:root:/root:/bin/bash\n" + "hdfs:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\n" + "hdfs:x:11502:10788:Grid Distributed File System:/home/hdfs:/bin/bash\n" @@ -247,7 +246,7 @@ public class TestShellBasedIdMapping { @Test public void testIdOutOfIntegerRange() throws IOException { - assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); String GET_ALL_USERS_CMD = "echo \"" + "nfsnobody:x:4294967294:4294967294:Anonymous NFS User:/var/lib/nfs:/sbin/nologin\n" + "nfsnobody1:x:4294967295:4294967295:Anonymous NFS User:/var/lib/nfs1:/sbin/nologin\n" diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/PlatformAssumptions.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/PlatformAssumptions.java new file mode 100644 index 00000000000..4e831625023 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/PlatformAssumptions.java @@ -0,0 +1,47 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.test; + +import org.junit.internal.AssumptionViolatedException; + +/** + * JUnit assumptions for the environment (OS). + */ +public final class PlatformAssumptions { + public static final String OS_NAME = System.getProperty("os.name"); + public static final boolean WINDOWS = OS_NAME.startsWith("Windows"); + + private PlatformAssumptions() { } + + public static void assumeNotWindows() { + assumeNotWindows("Expected Unix-like platform but got " + OS_NAME); + } + + public static void assumeNotWindows(String message) { + if (WINDOWS) { + throw new AssumptionViolatedException(message); + } + } + + public static void assumeWindows() { + if (!WINDOWS) { + throw new AssumptionViolatedException( + "Expected Windows platform but got " + OS_NAME); + } + } +} diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java index cfa97f4a099..e45890cb04a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestWinUtils.java @@ -18,8 +18,8 @@ package org.apache.hadoop.util; +import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import static org.junit.Assert.*; -import static org.junit.Assume.assumeTrue; import static org.junit.matchers.JUnitMatchers.containsString; import java.io.File; @@ -53,7 +53,7 @@ public class TestWinUtils { @Before public void setUp() throws IOException { // Not supported on non-Windows platforms - assumeTrue(Shell.WINDOWS); + assumeWindows(); TEST_DIR.mkdirs(); assertTrue("Failed to create Test directory " + TEST_DIR, TEST_DIR.isDirectory() ); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java index 30778e6970b..32d960ad6fe 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestGlobPaths.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.fs; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.*; import java.io.IOException; -import java.security.PrivilegedExceptionAction; import java.util.ArrayList; import java.util.UUID; import java.util.regex.Pattern; @@ -495,7 +495,7 @@ public class TestGlobPaths { public void pTestEscape() throws IOException { // Skip the test case on Windows because backslash will be treated as a // path separator instead of an escaping character on Windows. 
- org.junit.Assume.assumeTrue(!Path.WINDOWS); + assumeNotWindows(); try { String [] files = new String[] {USER_DIR+"/ab\\[c.d"}; Path[] matchedPath = prepareTesting(USER_DIR+"/ab\\[c.d", files); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java index b139845ece6..bef5e330276 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java @@ -34,6 +34,7 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYPASSWORD_ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY; import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.hamcrest.CoreMatchers.not; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; @@ -73,9 +74,7 @@ import org.apache.hadoop.security.alias.CredentialProvider; import org.apache.hadoop.security.alias.CredentialProviderFactory; import org.apache.hadoop.security.alias.JavaKeyStoreProvider; import org.apache.hadoop.test.GenericTestUtils; -import org.apache.hadoop.util.Shell; import org.junit.Assert; -import org.junit.Assume; import org.junit.Before; import org.junit.Test; @@ -800,7 +799,7 @@ public class TestDFSUtil { @Test (timeout=15000) public void testLocalhostReverseLookup() { // 127.0.0.1 -> localhost reverse resolution does not happen on Windows. - Assume.assumeTrue(!Shell.WINDOWS); + assumeNotWindows(); // Make sure when config FS_DEFAULT_NAME_KEY using IP address, // it will automatically convert it to hostname diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java index 788f1e5c4b9..73a4cbca867 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java @@ -43,10 +43,10 @@ import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KERBEROS_PRINCIP import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.IOException; @@ -451,7 +451,7 @@ public class TestBalancer { // This test assumes stick-bit based block pin mechanism available only // in Linux/Unix. It can be unblocked on Windows when HDFS-7759 is ready to // provide a different mechanism for Windows. 
- assumeTrue(!Path.WINDOWS); + assumeNotWindows(); final Configuration conf = new HdfsConfiguration(); initConf(conf); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java index 744e9faf9d3..9816af840ee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java @@ -17,9 +17,8 @@ */ package org.apache.hadoop.hdfs.server.blockmanagement; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; import java.io.Closeable; import java.io.IOException; @@ -70,7 +69,7 @@ public class TestRBWBlockInvalidation { throws IOException, InterruptedException { // This test cannot pass on Windows due to file locking enforcement. It will // reject the attempt to delete the block file from the RBW folder. - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); Configuration conf = new HdfsConfiguration(); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java index c03b02b0ad6..0dbb09c4d16 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeHotSwapVolumes.java @@ -72,6 +72,7 @@ import org.mockito.invocation.InvocationOnMock; import org.mockito.stubbing.Answer; import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.hamcrest.CoreMatchers.anyOf; import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.not; @@ -81,7 +82,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.junit.Assume.assumeTrue; import static org.mockito.Matchers.any; import static org.mockito.Matchers.anyString; import static org.mockito.Mockito.doAnswer; @@ -784,7 +784,7 @@ public class TestDataNodeHotSwapVolumes { ReconfigurationException { // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate // volume failures which is currently not supported on Windows. 
- assumeTrue(!Path.WINDOWS); + assumeNotWindows(); startDFSCluster(1, 2); createFile(new Path("/test"), 32, (short)2); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java index 00c2f6279c3..6792ba8af78 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailure.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hdfs.server.datanode; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; @@ -24,7 +25,6 @@ import static org.junit.Assert.assertNotEquals; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.IOException; @@ -219,7 +219,7 @@ public class TestDataNodeVolumeFailure { throws InterruptedException, IOException, TimeoutException { // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate // volume failures which is currently not supported on Windows. - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); Path file1 = new Path("/test1"); DFSTestUtil.createFile(fs, file1, 1024, (short) 2, 1L); @@ -384,7 +384,7 @@ public class TestDataNodeVolumeFailure { public void testUnderReplicationAfterVolFailure() throws Exception { // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate // volume failures which is currently not supported on Windows. - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); // Bring up one more datanode cluster.startDataNodes(conf, 1, true, null, null); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java index c76fa2cdffa..6c587431949 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureReporting.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.server.datanode; import static org.apache.hadoop.test.MetricsAsserts.assertCounter; import static org.apache.hadoop.test.MetricsAsserts.getMetrics; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; @@ -26,7 +27,6 @@ import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertThat; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; import java.io.File; import java.util.ArrayList; @@ -82,7 +82,7 @@ public class TestDataNodeVolumeFailureReporting { public void setUp() throws Exception { // These tests use DataNodeTestUtils#injectDataDirFailure() to simulate // volume failures which is currently not supported on Windows. 
- assumeTrue(!Path.WINDOWS); + assumeNotWindows(); // Allow a single volume failure (there are two volumes) initCluster(1, 2, 1); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java index 2f8239e3275..5ff7d9b06b8 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java @@ -17,10 +17,10 @@ */ package org.apache.hadoop.hdfs.server.datanode; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; -import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.IOException; @@ -91,7 +91,7 @@ public class TestDataNodeVolumeFailureToleration { */ @Test public void testValidVolumesAtStartup() throws Exception { - assumeTrue(!System.getProperty("os.name").startsWith("Windows")); + assumeNotWindows(); // Make sure no DNs are running. cluster.shutdownDataNodes(); @@ -139,7 +139,7 @@ public class TestDataNodeVolumeFailureToleration { */ @Test public void testConfigureMinValidVolumes() throws Exception { - assumeTrue(!System.getProperty("os.name").startsWith("Windows")); + assumeNotWindows(); // Bring up two additional datanodes that need both of their volumes // functioning in order to stay up. @@ -218,7 +218,7 @@ public class TestDataNodeVolumeFailureToleration { private void testVolumeConfig(int volumesTolerated, int volumesFailed, boolean expectedBPServiceState, boolean manageDfsDirs) throws IOException, InterruptedException { - assumeTrue(!System.getProperty("os.name").startsWith("Windows")); + assumeNotWindows(); final int dnIndex = 0; // Fail the current directory since invalid storage directory perms // get fixed up automatically on datanode startup. 
@@ -272,7 +272,7 @@ public class TestDataNodeVolumeFailureToleration { */ @Test public void testFailedVolumeOnStartupIsCounted() throws Exception { - assumeTrue(!System.getProperty("os.name").startsWith("Windows")); + assumeNotWindows(); final DatanodeManager dm = cluster.getNamesystem().getBlockManager( ).getDatanodeManager(); long origCapacity = DFSTestUtil.getLiveDatanodeCapacity(dm); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java index ce37abdfdd8..40de32066ae 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestFsDatasetCacheRevocation.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hdfs.server.datanode; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assume.assumeTrue; import java.io.File; @@ -96,7 +97,8 @@ public class TestFsDatasetCacheRevocation { */ @Test(timeout=120000) public void testPinning() throws Exception { - assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS); + assumeTrue(NativeCodeLoader.isNativeCodeLoaded()); + assumeNotWindows(); Configuration conf = getDefaultConf(); // Set a really long revocation timeout, so that we won't reach it during // this test. @@ -146,7 +148,8 @@ public class TestFsDatasetCacheRevocation { */ @Test(timeout=120000) public void testRevocation() throws Exception { - assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS); + assumeTrue(NativeCodeLoader.isNativeCodeLoaded()); + assumeNotWindows(); BlockReaderTestUtil.enableHdfsCachingTracing(); BlockReaderTestUtil.enableShortCircuitShmTracing(); Configuration conf = getDefaultConf(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java index f598a0728ef..7043227fe77 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/fsdataset/impl/TestScrLazyPersistFiles.java @@ -42,6 +42,7 @@ import java.util.concurrent.TimeoutException; import static org.apache.hadoop.fs.StorageType.DEFAULT; import static org.apache.hadoop.fs.StorageType.RAM_DISK; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.core.Is.is; import static org.junit.Assert.assertThat; @@ -61,8 +62,8 @@ public class TestScrLazyPersistFiles extends LazyPersistTestCase { @Before public void before() { - Assume.assumeThat(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS, - equalTo(true)); + Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded()); + assumeNotWindows(); Assume.assumeThat(DomainSocket.getLoadingFailureReason(), equalTo(null)); final long osPageSize = NativeIO.POSIX.getCacheManipulator().getOperatingSystemPageSize(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java index b9946c5a29b..c4ae8ce79b5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestAtomicFileOutputStream.java @@ -17,11 +17,11 @@ */ package org.apache.hadoop.hdfs.util; +import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; import static org.junit.Assert.fail; -import static org.junit.Assume.assumeTrue; import java.io.File; import java.io.FileNotFoundException; @@ -33,7 +33,6 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdfs.DFSTestUtil; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.test.PathUtils; -import org.apache.hadoop.util.Shell; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -129,7 +128,7 @@ public class TestAtomicFileOutputStream { @Test public void testFailToRename() throws IOException { - assumeTrue(Shell.WINDOWS); + assumeWindows(); OutputStream fos = null; try { fos = new AtomicFileOutputStream(DST_FILE); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java index b3cf4021107..03131f3943b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/tracing/TestTracingShortCircuitLocalRead.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.tracing; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assume.assumeTrue; import java.io.File; @@ -24,7 +25,6 @@ import java.io.IOException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FSDataInputStream; -import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FsTracer; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.DFSConfigKeys; @@ -63,7 +63,8 @@ public class TestTracingShortCircuitLocalRead { @Test public void testShortCircuitTraceHooks() throws IOException { - assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS); + assumeTrue(NativeCodeLoader.isNativeCodeLoaded()); + assumeNotWindows(); conf = new Configuration(); conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX + Tracer.SPAN_RECEIVER_CLASSES_KEY, diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java index abf2e72e0d1..0e17ac84735 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java @@ -18,6 +18,7 @@ package org.apache.hadoop.mapred; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import 
static org.junit.Assert.assertNotNull; @@ -65,7 +66,6 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; -import org.apache.hadoop.util.Shell; import org.apache.hadoop.yarn.api.ApplicationClientProtocol; import org.apache.hadoop.yarn.api.ApplicationConstants; import org.apache.hadoop.yarn.api.ApplicationConstants.Environment; @@ -586,9 +586,7 @@ public class TestYARNRunner { // the Windows behavior is different and this test currently doesn't really // apply // MAPREDUCE-6588 should revisit this test - if (Shell.WINDOWS) { - return; - } + assumeNotWindows(); final String ADMIN_LIB_PATH = "foo"; final String USER_LIB_PATH = "bar"; diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java index f01829bd9cd..69c942d3778 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemOperationsMocked.java @@ -18,7 +18,7 @@ package org.apache.hadoop.fs.azure; -import static org.junit.Assume.assumeTrue; +import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows; import org.apache.hadoop.fs.FSMainOperationsBaseTest; import org.apache.hadoop.fs.FileSystem; @@ -48,7 +48,7 @@ public class TestNativeAzureFileSystemOperationsMocked extends System.out .println("Skipping testListStatusThrowsExceptionForUnreadableDir since WASB" + " doesn't honor directory permissions."); - assumeTrue(!Path.WINDOWS); + assumeNotWindows(); } @Override diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java index 2284d1f19cb..0bf33d8138b 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestShellDecryptionKeyProvider.java @@ -18,6 +18,7 @@ package org.apache.hadoop.fs.azure; +import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows; import static org.junit.Assert.assertEquals; import java.io.File; @@ -26,7 +27,6 @@ import org.apache.commons.io.FileUtils; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.util.Shell; import org.junit.Assert; import org.junit.Test; @@ -38,9 +38,7 @@ public class TestShellDecryptionKeyProvider { @Test public void testScriptPathNotSpecified() throws Exception { - if (!Shell.WINDOWS) { - return; - } + assumeWindows(); ShellDecryptionKeyProvider provider = new ShellDecryptionKeyProvider(); Configuration conf = new Configuration(); String account = "testacct"; @@ -58,9 +56,7 @@ public class TestShellDecryptionKeyProvider { @Test public void testValidScript() throws Exception { - if (!Shell.WINDOWS) { - return; - } + assumeWindows(); String expectedResult = "decretedKey"; // Create a simple script which echoes the given key plus the given diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
index cbab1edf33e..569970288ac 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/util/TestWindowsBasedProcessTree.java
@@ -20,10 +20,10 @@ package org.apache.hadoop.yarn.util;

 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 import org.junit.Test;

+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertTrue;

 public class TestWindowsBasedProcessTree {
@@ -45,10 +45,7 @@
   @Test (timeout = 30000)
   @SuppressWarnings("deprecation")
   public void tree() {
-    if( !Shell.WINDOWS) {
-      LOG.info("Platform not Windows. Not testing");
-      return;
-    }
+    assumeWindows();
     assertTrue("WindowsBasedProcessTree should be available on Windows",
         WindowsBasedProcessTree.isAvailable());
     ControlledClock testClock = new ControlledClock();
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
index bc87b0331b6..396c8f4e69a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestContainerExecutor.java
@@ -31,8 +31,8 @@ import org.apache.hadoop.yarn.util.ResourceCalculatorPlugin;
 import org.junit.Assert;
 import org.junit.Test;

+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;

 @SuppressWarnings("deprecation")
 public class TestContainerExecutor {
@@ -80,8 +80,7 @@

   @Test (timeout = 5000)
   public void testRunCommandWithNoResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     Configuration conf = new Configuration();
     String[] command = containerExecutor.getRunCommand("echo", "group1", null, null,
         conf, Resource.newInstance(1024, 1));
@@ -93,8 +92,7 @@

   @Test (timeout = 5000)
   public void testRunCommandWithMemoryOnlyResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     Configuration conf = new Configuration();
     conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_MEMORY_LIMIT_ENABLED, "true");
     String[] command = containerExecutor.getRunCommand("echo", "group1", null, null,
@@ -107,8 +105,7 @@

   @Test (timeout = 5000)
   public void testRunCommandWithCpuAndMemoryResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     int containerCores = 1;
     Configuration conf = new Configuration();
     conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_CPU_LIMIT_ENABLED, "true");
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
index aae003784a3..ae5a011172f 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestLinuxContainerExecutorWithMocks.java
@@ -18,9 +18,9 @@

 package org.apache.hadoop.yarn.server.nodemanager;

+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
@@ -123,7 +123,7 @@ public class TestLinuxContainerExecutorWithMocks {

   @Before
   public void setup() throws IOException, ContainerExecutionException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();

     tmpMockExecutor = System.getProperty("test.build.data") +
         "/tmp-mock-container-executor";
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
index 6b32dd9af86..a06822a903a 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/launcher/TestContainerLaunch.java
@@ -18,6 +18,7 @@

 package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher;

+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
@@ -408,7 +409,7 @@
   public void testPrependDistcache() throws Exception {

     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();

     ContainerLaunchContext containerLaunchContext =
         recordFactory.newRecordInstance(ContainerLaunchContext.class);
@@ -1129,7 +1130,7 @@
     String callCmd = "@call ";

     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();

     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1177,7 +1178,7 @@
   @Test (timeout = 10000)
   public void testWindowsShellScriptBuilderEnv() throws IOException {
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();

     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1202,7 +1203,7 @@
     String mkDirCmd = "@if not exist \"\" mkdir \"\"";

     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();

     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1225,7 +1226,7 @@
   @Test (timeout = 10000)
   public void testWindowsShellScriptBuilderLink() throws IOException {
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     String linkCmd = "@" + Shell.getWinUtilsPath() + " symlink \"\" \"\"";

     // The tests are built on assuming 8191 max command line length