From 054dd2d39c66370a7905e545fd9babf18160abe6 Mon Sep 17 00:00:00 2001
From: Jonathan Turner Eagles
Date: Fri, 3 Aug 2012 20:42:18 +0000
Subject: [PATCH] svn merge -c 1369197 FIXES: MAPREDUCE-4503. Should throw
 InvalidJobConfException if duplicates found in cacheArchives or cacheFiles
 (Robert Evans via jeagles)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1369201 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt          |   3 +
 .../hadoop/mapreduce/v2/util/MRApps.java      |  16 +++
 .../hadoop/mapreduce/v2/util/TestMRApps.java  | 129 +++++++++++++++++-
 3 files changed, 146 insertions(+), 2 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 6fa04d5822d..b4ef59cc737 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -671,6 +671,9 @@ Release 0.23.3 - UNRELEASED
     MAPREDUCE-4504. SortValidator writes to wrong directory (Robert Evans via
     tgraves)
 
+    MAPREDUCE-4503. Should throw InvalidJobConfException if duplicates found in
+    cacheArchives or cacheFiles (Robert Evans via jeagles)
+
 Release 0.23.2 - UNRELEASED
 
   INCOMPATIBLE CHANGES

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
index 32fc481ce1c..32817380ac9 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
@@ -35,6 +35,7 @@ import org.apache.hadoop.classification.InterfaceStability.Unstable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -56,6 +57,7 @@ import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
+import org.apache.hadoop.yarn.util.ConverterUtils;
 
 /**
  * Helper class for MR applications
@@ -264,6 +266,13 @@ public class MRApps extends Apps {
         DistributedCache.getFileClassPaths(conf));
   }
 
+  private static String getResourceDescription(LocalResourceType type) {
+    if(type == LocalResourceType.ARCHIVE) {
+      return "cache archive (" + MRJobConfig.CACHE_ARCHIVES + ") ";
+    }
+    return "cache file (" + MRJobConfig.CACHE_FILES + ") ";
+  }
+
   // TODO - Move this to MR!
   // Use TaskDistributedCacheManager.CacheFiles.makeCacheFiles(URI[],
   //       long[], boolean[], Path[], FileType)
@@ -309,6 +318,13 @@ public class MRApps extends Apps {
       throw new IllegalArgumentException("Resource name must be relative");
     }
     String linkName = name.toUri().getPath();
+    LocalResource orig = localResources.get(linkName);
+    if(orig != null && !orig.getResource().equals(
+        ConverterUtils.getYarnUrlFromURI(p.toUri()))) {
+      throw new InvalidJobConfException(
+          getResourceDescription(orig.getType()) + orig.getResource() +
+          " conflicts with " + getResourceDescription(type) + u);
+    }
     localResources.put(
         linkName,
         BuilderUtils.newLocalResource(

diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
index b50b8f89bfc..8772508c992 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
@@ -19,25 +19,33 @@
 package org.apache.hadoop.mapreduce.v2.util;
 
 import java.io.IOException;
+import java.net.URI;
 import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FilterFileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.filecache.DistributedCache;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
-import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.LocalResource;
+import org.apache.hadoop.yarn.api.records.LocalResourceType;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.junit.Test;
 
 import static org.junit.Assert.*;
+import static org.mockito.Mockito.*;
 
 public class TestMRApps {
 
@@ -166,5 +174,122 @@ public class TestMRApps {
     assertNotSame("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!",
       env_str.indexOf("$PWD:job.jar"), 0);
   }
-
+
+  @Test
+  public void testSetupDistributedCacheEmpty() throws IOException {
+    Configuration conf = new Configuration();
+    Map<String, LocalResource> localResources = new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+    assertTrue("Empty Config did not produce an empty list of resources",
+        localResources.isEmpty());
+  }
+
+  @SuppressWarnings("deprecation")
+  @Test(expected = InvalidJobConfException.class)
+  public void testSetupDistributedCacheConflicts() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+
+    URI archive = new URI("mockfs://mock/tmp/something.zip#something");
+    Path archivePath = new Path(archive);
+    URI file = new URI("mockfs://mock/tmp/something.txt#something");
+    Path filePath = new Path(file);
+
+    when(mockFs.resolvePath(archivePath)).thenReturn(archivePath);
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+
+    DistributedCache.addCacheArchive(archive, conf);
+    conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_VISIBILITIES, "true");
+    DistributedCache.addCacheFile(file, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true");
+    Map<String, LocalResource> localResources =
+        new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+  }
+
+  @SuppressWarnings("deprecation")
+  @Test(expected = InvalidJobConfException.class)
+  public void testSetupDistributedCacheConflictsFiles() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+
+    URI file = new URI("mockfs://mock/tmp/something.zip#something");
+    Path filePath = new Path(file);
+    URI file2 = new URI("mockfs://mock/tmp/something.txt#something");
+    Path file2Path = new Path(file2);
+
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+    when(mockFs.resolvePath(file2Path)).thenReturn(file2Path);
+
+    DistributedCache.addCacheFile(file, conf);
+    DistributedCache.addCacheFile(file2, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "10,11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "10,11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true,true");
+    Map<String, LocalResource> localResources =
+        new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+  }
+
+  @SuppressWarnings("deprecation")
+  @Test
+  public void testSetupDistributedCache() throws Exception {
+    Configuration conf = new Configuration();
+    conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
+
+    URI mockUri = URI.create("mockfs://mock/");
+    FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+        .getRawFileSystem();
+
+    URI archive = new URI("mockfs://mock/tmp/something.zip");
+    Path archivePath = new Path(archive);
+    URI file = new URI("mockfs://mock/tmp/something.txt#something");
+    Path filePath = new Path(file);
+
+    when(mockFs.resolvePath(archivePath)).thenReturn(archivePath);
+    when(mockFs.resolvePath(filePath)).thenReturn(filePath);
+
+    DistributedCache.addCacheArchive(archive, conf);
+    conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10");
+    conf.set(MRJobConfig.CACHE_ARCHIVES_VISIBILITIES, "true");
+    DistributedCache.addCacheFile(file, conf);
+    conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11");
+    conf.set(MRJobConfig.CACHE_FILES_SIZES, "11");
+    conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true");
+    Map<String, LocalResource> localResources =
+        new HashMap<String, LocalResource>();
+    MRApps.setupDistributedCache(conf, localResources);
+    assertEquals(2, localResources.size());
+    LocalResource lr = localResources.get("something.zip");
+    assertNotNull(lr);
+    assertEquals(10l, lr.getSize());
+    assertEquals(10l, lr.getTimestamp());
+    assertEquals(LocalResourceType.ARCHIVE, lr.getType());
+    lr = localResources.get("something");
+    assertNotNull(lr);
+    assertEquals(11l, lr.getSize());
+    assertEquals(11l, lr.getTimestamp());
+    assertEquals(LocalResourceType.FILE, lr.getType());
+  }
+
+  static class MockFileSystem extends FilterFileSystem {
+    MockFileSystem() {
+      super(mock(FileSystem.class));
+    }
+    public void initialize(URI name, Configuration conf) throws IOException {}
+  }
+
 }
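
Editor's note (illustrative, not part of the patch): the check added to
MRApps.setupDistributedCache rejects job configurations in which two
distributed-cache entries localize under the same link name while pointing at
different resources. A minimal sketch of the kind of configuration that now
fails fast is below; the HDFS paths and class name are hypothetical, and the
snippet uses the same (deprecated) DistributedCache helpers the patch's tests
use. Actually submitting such a job requires a running cluster.

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.filecache.DistributedCache;

    public class DuplicateCacheLinkExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Both entries use the URI fragment "#data", so both would be
        // localized under the link name "data" in the task working directory
        // even though they refer to different files.
        DistributedCache.addCacheFile(new URI("hdfs:///tmp/one.txt#data"), conf);
        DistributedCache.addCacheFile(new URI("hdfs:///tmp/two.txt#data"), conf);

        // With this change, MRApps.setupDistributedCache (run while building
        // the job's container launch context) throws InvalidJobConfException
        // with a message of the form
        //   "cache file (mapreduce.job.cache.files) ... conflicts with ..."
        // instead of silently keeping only one of the conflicting entries.
      }
    }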