diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index a40e6ca6531..671523bc400 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -38,6 +38,9 @@ Release 2.6.0 - UNRELEASED
     MAPREDUCE-6014. New task status field in task attempts table can lead to
     an empty web page (Mit Desai via jlowe)
 
+    MAPREDUCE-6021. MR AM should have working directory in LD_LIBRARY_PATH
+    (jlowe)
+
 Release 2.5.0 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
index 80765cf55b4..5120c85f41b 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
@@ -447,6 +447,15 @@ public class YARNRunner implements ClientProtocol {
     Map<String, String> environment = new HashMap<String, String>();
     MRApps.setClasspath(environment, conf);
 
+    // Shell
+    environment.put(Environment.SHELL.name(),
+        conf.get(MRJobConfig.MAPRED_ADMIN_USER_SHELL,
+            MRJobConfig.DEFAULT_SHELL));
+
+    // Add the container working directory at the front of LD_LIBRARY_PATH
+    MRApps.addToEnvironment(environment, Environment.LD_LIBRARY_PATH.name(),
+        MRApps.crossPlatformifyMREnv(conf, Environment.PWD), conf);
+
     // Setup the environment variables for Admin first
     MRApps.setEnvFromInputString(environment,
         conf.get(MRJobConfig.MR_AM_ADMIN_USER_ENV), conf);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
index 39dec0607b4..38a79fbb9a7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestYARNRunner.java
@@ -29,6 +29,7 @@ import static org.mockito.Mockito.when;
 
 import java.io.ByteArrayOutputStream;
 import java.io.File;
+import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -36,6 +37,7 @@ import java.net.InetSocketAddress;
 import java.nio.ByteBuffer;
 import java.security.PrivilegedExceptionAction;
 import java.util.List;
+import java.util.Map;
 
 import junit.framework.TestCase;
 
@@ -44,22 +46,27 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.JobPriority;
 import org.apache.hadoop.mapreduce.JobStatus.State;
+import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
 import org.apache.hadoop.mapreduce.v2.api.MRDelegationTokenIdentifier;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDelegationTokenResponse;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
+import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
 import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportRequest;
@@ -94,6 +101,7 @@ import org.apache.log4j.Layout;
 import org.apache.log4j.Logger;
 import org.apache.log4j.SimpleLayout;
 import org.apache.log4j.WriterAppender;
+import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.invocation.InvocationOnMock;
@@ -146,8 +154,12 @@ public class TestYARNRunner extends TestCase {
       FileContext.getLocalFSFileContext().delete(new Path(testWorkDir.toString()), true);
     }
     testWorkDir.mkdirs();
-   }
+  }
 
+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(testWorkDir);
+  }
 
   @Test(timeout=20000)
   public void testJobKill() throws Exception {
@@ -397,21 +409,8 @@ public class TestYARNRunner extends TestCase {
 
     YARNRunner yarnRunner = new YARNRunner(jobConf);
 
-    File jobxml = new File(testWorkDir, MRJobConfig.JOB_CONF_FILE);
-    OutputStream out = new FileOutputStream(jobxml);
-    conf.writeXml(out);
-    out.close();
-
-    File jobsplit = new File(testWorkDir, MRJobConfig.JOB_SPLIT);
-    out = new FileOutputStream(jobsplit);
-    out.close();
-
-    File jobsplitmetainfo = new File(testWorkDir, MRJobConfig.JOB_SPLIT_METAINFO);
-    out = new FileOutputStream(jobsplitmetainfo);
-    out.close();
-
-    ApplicationSubmissionContext submissionContext =
-        yarnRunner.createApplicationSubmissionContext(jobConf, testWorkDir.toString(), new Credentials());
+    ApplicationSubmissionContext submissionContext =
+        buildSubmitContext(yarnRunner, jobConf);
 
     ContainerLaunchContext containerSpec = submissionContext.getAMContainerSpec();
     List<String> commands = containerSpec.getCommands();
@@ -463,22 +462,9 @@ public class TestYARNRunner extends TestCase {
 
     YARNRunner yarnRunner = new YARNRunner(jobConf);
 
-    File jobxml = new File(testWorkDir, MRJobConfig.JOB_CONF_FILE);
-    OutputStream out = new FileOutputStream(jobxml);
-    conf.writeXml(out);
-    out.close();
-
-    File jobsplit = new File(testWorkDir, MRJobConfig.JOB_SPLIT);
-    out = new FileOutputStream(jobsplit);
-    out.close();
-
-    File jobsplitmetainfo = new File(testWorkDir, MRJobConfig.JOB_SPLIT_METAINFO);
-    out = new FileOutputStream(jobsplitmetainfo);
-    out.close();
 
-    @SuppressWarnings("unused")
-    ApplicationSubmissionContext submissionContext =
-        yarnRunner.createApplicationSubmissionContext(jobConf, testWorkDir.toString(), new Credentials());
+    ApplicationSubmissionContext submissionContext =
+        buildSubmitContext(yarnRunner, jobConf);
 
     String logMsg = bout.toString();
     assertTrue(logMsg.contains("WARN - Usage of -Djava.library.path in " +
@@ -492,4 +478,60 @@ public class TestYARNRunner extends TestCase {
         "be set as part of the LD_LIBRARY_PATH in the app master JVM env " +
         "using yarn.app.mapreduce.am.env config settings."));
   }
+
+  @Test
+  public void testAMStandardEnv() throws Exception {
+    final String ADMIN_LIB_PATH = "foo";
+    final String USER_LIB_PATH = "bar";
+    final String USER_SHELL = "shell";
+    JobConf jobConf = new JobConf();
+
+    jobConf.set(MRJobConfig.MR_AM_ADMIN_USER_ENV, "LD_LIBRARY_PATH=" +
+        ADMIN_LIB_PATH);
+    jobConf.set(MRJobConfig.MR_AM_ENV, "LD_LIBRARY_PATH=" +
+        USER_LIB_PATH);
+    jobConf.set(MRJobConfig.MAPRED_ADMIN_USER_SHELL, USER_SHELL);
+
+    YARNRunner yarnRunner = new YARNRunner(jobConf);
+    ApplicationSubmissionContext appSubCtx =
+        buildSubmitContext(yarnRunner, jobConf);
+
+    // make sure PWD is first in the lib path
+    ContainerLaunchContext clc = appSubCtx.getAMContainerSpec();
+    Map<String, String> env = clc.getEnvironment();
+    String libPath = env.get(Environment.LD_LIBRARY_PATH.name());
+    assertNotNull("LD_LIBRARY_PATH not set", libPath);
+    String cps = jobConf.getBoolean(
+        MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM,
+        MRConfig.DEFAULT_MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM)
+        ? ApplicationConstants.CLASS_PATH_SEPARATOR : File.pathSeparator;
+    assertEquals("Bad AM LD_LIBRARY_PATH setting",
+        MRApps.crossPlatformifyMREnv(conf, Environment.PWD)
+        + cps + ADMIN_LIB_PATH + cps + USER_LIB_PATH, libPath);
+
+    // make sure SHELL is set
+    String shell = env.get(Environment.SHELL.name());
+    assertNotNull("SHELL not set", shell);
+    assertEquals("Bad SHELL setting", USER_SHELL, shell);
+  }
+
+  private ApplicationSubmissionContext buildSubmitContext(
+      YARNRunner yarnRunner, JobConf jobConf) throws IOException {
+    File jobxml = new File(testWorkDir, MRJobConfig.JOB_CONF_FILE);
+    OutputStream out = new FileOutputStream(jobxml);
+    conf.writeXml(out);
+    out.close();
+
+    File jobsplit = new File(testWorkDir, MRJobConfig.JOB_SPLIT);
+    out = new FileOutputStream(jobsplit);
+    out.close();
+
+    File jobsplitmetainfo = new File(testWorkDir,
+        MRJobConfig.JOB_SPLIT_METAINFO);
+    out = new FileOutputStream(jobsplitmetainfo);
+    out.close();
+
+    return yarnRunner.createApplicationSubmissionContext(jobConf,
+        testWorkDir.toString(), new Credentials());
+  }
 }
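
Note (not part of the patch): a minimal submitter-side sketch of the behavior the change above introduces. YARNRunner now sets SHELL for the AM container and prepends the container working directory to LD_LIBRARY_PATH before the admin (MR_AM_ADMIN_USER_ENV) and user (MR_AM_ENV) values are appended, which is what testAMStandardEnv asserts. The library paths and class name below are hypothetical, for illustration only.

    // Illustration only -- assumes the Hadoop client libraries are on the classpath.
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class AmEnvExample {
      public static void main(String[] args) {
        JobConf jobConf = new JobConf();
        // Hypothetical native-library locations set by the admin and the user.
        jobConf.set(MRJobConfig.MR_AM_ADMIN_USER_ENV, "LD_LIBRARY_PATH=/opt/cluster/native");
        jobConf.set(MRJobConfig.MR_AM_ENV, "LD_LIBRARY_PATH=/home/user/native");
        // With this patch the AM container is launched with
        //   LD_LIBRARY_PATH = <container working dir><sep>/opt/cluster/native<sep>/home/user/native
        // where <sep> is ApplicationConstants.CLASS_PATH_SEPARATOR for cross-platform
        // submission or File.pathSeparator otherwise, so native libraries localized
        // into the AM's working directory are picked up first.
      }
    }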