From 5e007776f62a2390ea2a6d6938114ddde25a7f60 Mon Sep 17 00:00:00 2001
From: Suresh Srinivas
Date: Fri, 24 May 2013 16:19:53 +0000
Subject: [PATCH] MAPREDUCE-4987. Merge r1470003 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1486106 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt          |  4 ++
 .../hadoop/mapreduce/v2/TestMRJobs.java       | 68 +++++++++++++++----
 2 files changed, 59 insertions(+), 13 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 4089095015b..de98617992d 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -143,6 +143,10 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-4885. Streaming tests have multiple failures on Windows.
     (Chris Nauroth via bikas)
 
+    MAPREDUCE-4987. TestMRJobs#testDistributedCache fails on Windows due to
+    classpath problems and unexpected behavior of symlinks (Chris Nauroth via
+    bikas)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS
 
     MAPREDUCE-4739. Some MapReduce tests fail to find winutils.
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
index 698b67b6ccf..cb84d30510e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java
@@ -18,11 +18,13 @@
 package org.apache.hadoop.mapreduce.v2;
 
+import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.StringReader;
 import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
@@ -47,6 +49,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
@@ -71,6 +74,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.JarFinder;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -93,13 +97,6 @@ public class TestMRJobs {
     } catch (IOException io) {
       throw new RuntimeException("problem getting local fs", io);
     }
-    try {
-      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
-        .format(true).racks(null).build();
-      remoteFs = dfsCluster.getFileSystem();
-    } catch (IOException io) {
-      throw new RuntimeException("problem starting mini dfs cluster", io);
-    }
   }
 
   private static Path TEST_ROOT_DIR = new Path("target",
@@ -110,6 +107,13 @@
 
   @BeforeClass
   public static void setup() throws IOException {
+    try {
+      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
+        .format(true).racks(null).build();
+      remoteFs = dfsCluster.getFileSystem();
+    } catch (IOException io) {
+      throw new RuntimeException("problem starting mini dfs cluster", io);
+    }
 
     if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
       LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
@@ -215,7 +219,7 @@
     }
   }
 
-  @Test (timeout = 30000)
+  @Test (timeout = 60000)
   public void testRandomWriter() throws IOException, InterruptedException,
       ClassNotFoundException {
 
@@ -277,7 +281,7 @@
         && counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue() != 0);
   }
 
-  @Test (timeout = 30000)
+  @Test (timeout = 60000)
   public void testFailingMapper() throws IOException, InterruptedException,
       ClassNotFoundException {
 
@@ -359,7 +363,7 @@
     return job;
   }
 
-  //@Test (timeout = 30000)
+  //@Test (timeout = 60000)
   public void testSleepJobWithSecurityOn() throws IOException,
       InterruptedException, ClassNotFoundException {
 
@@ -467,8 +471,46 @@
       // Check that the symlink for the Job Jar was created in the cwd and
       // points to the extracted directory
       File jobJarDir = new File("job.jar");
-      Assert.assertTrue(FileUtils.isSymlink(jobJarDir));
-      Assert.assertTrue(jobJarDir.isDirectory());
+      if (Shell.WINDOWS) {
+        Assert.assertTrue(isWindowsSymlinkedDirectory(jobJarDir));
+      } else {
+        Assert.assertTrue(FileUtils.isSymlink(jobJarDir));
+        Assert.assertTrue(jobJarDir.isDirectory());
+      }
+    }
+
+    /**
+     * Used on Windows to determine if the specified file is a symlink that
+     * targets a directory. On most platforms, these checks can be done using
+     * commons-io. On Windows, the commons-io implementation is unreliable and
+     * always returns false. Instead, this method checks the output of the dir
+     * command. After migrating to Java 7, this method can be removed in favor
+     * of the new method java.nio.file.Files.isSymbolicLink, which is expected to
+     * work cross-platform.
+     *
+     * @param file File to check
+     * @return boolean true if the file is a symlink that targets a directory
+     * @throws IOException thrown for any I/O error
+     */
+    private static boolean isWindowsSymlinkedDirectory(File file)
+        throws IOException {
+      String dirOut = Shell.execCommand("cmd", "/c", "dir",
+        file.getAbsoluteFile().getParent());
+      StringReader sr = new StringReader(dirOut);
+      BufferedReader br = new BufferedReader(sr);
+      try {
+        String line = br.readLine();
+        while (line != null) {
+          if (line.contains(file.getName()) && line.contains("<SYMLINKD>")) {
+            return true;
+          }
+          line = br.readLine();
+        }
+        return false;
+      } finally {
+        IOUtils.closeStream(br);
+        IOUtils.closeStream(sr);
+      }
     }
 
     /**
@@ -542,7 +584,7 @@
         trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
   }
 
-  @Test (timeout = 300000)
+  @Test (timeout = 600000)
   public void testDistributedCache() throws Exception {
     // Test with a local (file:///) Job Jar
     Path localJobJarPath = makeJobJarWithLib(TEST_ROOT_DIR.toUri().toString());