MAPREDUCE-3505. yarn APPLICATION_CLASSPATH needs to be overridable. (ahmed via tucu)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1235391 13f79535-47bb-0310-9956-ffa450edef68
commit f73daf6af1
parent 7c69883917

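What the change enables, as a minimal sketch (the driver class and the extra classpath entry below are illustrative, not part of this commit): the application classpath that used to be hard-coded in ApplicationConstants.APPLICATION_CLASSPATH can now be replaced through yarn.application.classpath on any Configuration handed to MRApps.setClasspath.

import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.yarn.conf.YarnConfiguration;

// Illustrative driver, not part of this patch.
public class ClasspathOverrideSketch {
  public static void main(String[] args) throws Exception {
    // yarn-default.xml supplies the standard entries; a site or job
    // configuration can override them via yarn.application.classpath.
    Configuration conf = new YarnConfiguration();
    conf.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH,
        "$HADOOP_CONF_DIR,/opt/extra/lib/*");

    // setClasspath now takes the Configuration and reads the override.
    Map<String, String> environment = new HashMap<String, String>();
    MRApps.setClasspath(environment, conf);
    System.out.println(environment.get("CLASSPATH"));
  }
}
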
@@ -540,6 +540,9 @@ Release 0.23.1 - Unreleased
 
     MAPREDUCE-3681. Fixed computation of queue's usedCapacity. (acmurthy)
 
+    MAPREDUCE-3505. yarn APPLICATION_CLASSPATH needs to be overridable.
+    (ahmed via tucu)
+
 Release 0.23.0 - 2011-11-01
 
   INCOMPATIBLE CHANGES

@@ -522,13 +522,13 @@ public abstract class TaskAttemptImpl implements
    * a parent CLC and use it for all the containers, so this should go away
    * once the mr-generated-classpath stuff is gone.
    */
-  private static String getInitialClasspath() throws IOException {
+  private static String getInitialClasspath(Configuration conf) throws IOException {
     synchronized (classpathLock) {
       if (initialClasspathFlag.get()) {
         return initialClasspath;
       }
       Map<String, String> env = new HashMap<String, String>();
-      MRApps.setClasspath(env);
+      MRApps.setClasspath(env, conf);
       initialClasspath = env.get(Environment.CLASSPATH.name());
       initialClasspathFlag.set(true);
       return initialClasspath;
@@ -631,7 +631,7 @@ public abstract class TaskAttemptImpl implements
       Apps.addToEnvironment(
           environment,
           Environment.CLASSPATH.name(),
-          getInitialClasspath());
+          getInitialClasspath(conf));
     } catch (IOException e) {
       throw new YarnException(e);
     }

@@ -38,6 +38,10 @@
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-mapreduce-client-core</artifactId>
     </dependency>
+    <dependency>
+      <groupId>org.apache.hadoop</groupId>
+      <artifactId>hadoop-yarn-server-common</artifactId>
+    </dependency>
   </dependencies>
 
   <build>

@@ -54,6 +54,7 @@ import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.LocalResource;
 import org.apache.hadoop.yarn.api.records.LocalResourceType;
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.util.BuilderUtils;
@@ -171,7 +172,7 @@ public class MRApps extends Apps {
   }
 
   private static void setMRFrameworkClasspath(
-      Map<String, String> environment) throws IOException {
+      Map<String, String> environment, Configuration conf) throws IOException {
     InputStream classpathFileStream = null;
     BufferedReader reader = null;
     try {
@@ -208,8 +209,10 @@ public class MRApps extends Apps {
       }
 
       // Add standard Hadoop classes
-      for (String c : ApplicationConstants.APPLICATION_CLASSPATH) {
-        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c);
+      for (String c : conf.get(YarnConfiguration.YARN_APPLICATION_CLASSPATH)
+          .split(",")) {
+        Apps.addToEnvironment(environment, Environment.CLASSPATH.name(), c
+            .trim());
       }
     } finally {
       if (classpathFileStream != null) {
@@ -222,8 +225,8 @@ public class MRApps extends Apps {
     // TODO: Remove duplicates.
   }
 
-  public static void setClasspath(Map<String, String> environment)
-      throws IOException {
+  public static void setClasspath(Map<String, String> environment,
+      Configuration conf) throws IOException {
     Apps.addToEnvironment(
         environment,
         Environment.CLASSPATH.name(),
@@ -232,7 +235,7 @@ public class MRApps extends Apps {
         environment,
         Environment.CLASSPATH.name(),
         Environment.PWD.$() + Path.SEPARATOR + "*");
-    MRApps.setMRFrameworkClasspath(environment);
+    MRApps.setMRFrameworkClasspath(environment, conf);
   }
 
   private static final String STAGING_CONSTANT = ".staging";

@@ -18,7 +18,12 @@
 
 package org.apache.hadoop.mapreduce.v2.util;
 
+import java.io.IOException;
+import java.util.HashMap;
+import java.util.Map;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobID;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -121,4 +126,17 @@ public class TestMRApps {
         "/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile);
   }
 
+  @Test public void testSetClasspath() throws IOException {
+    Job job = Job.getInstance();
+    Map<String, String> environment = new HashMap<String, String>();
+    MRApps.setClasspath(environment, job.getConfiguration());
+    assertEquals("job.jar:$PWD/*:$HADOOP_CONF_DIR:" +
+        "$HADOOP_COMMON_HOME/share/hadoop/common/*:" +
+        "$HADOOP_COMMON_HOME/share/hadoop/common/lib/*:" +
+        "$HADOOP_HDFS_HOME/share/hadoop/hdfs/*:" +
+        "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*:" +
+        "$YARN_HOME/share/hadoop/mapreduce/*:" +
+        "$YARN_HOME/share/hadoop/mapreduce/lib/*",
+        environment.get("CLASSPATH"));
+  }
 }

@@ -406,7 +406,7 @@ public class YARNRunner implements ClientProtocol {
     // Setup the CLASSPATH in environment
     // i.e. add { job jar, CWD, Hadoop jars} to classpath.
     Map<String, String> environment = new HashMap<String, String>();
-    MRApps.setClasspath(environment);
+    MRApps.setClasspath(environment, conf);
 
     // Parse distributed cache
     MRApps.setupDistributedCache(jobConf, localResources);

@@ -84,21 +84,7 @@ public interface ApplicationConstants {
   public static final String STDERR = "stderr";
 
   public static final String STDOUT = "stdout";
 
-  /**
-   * Classpath for typical applications.
-   */
-  public static final String[] APPLICATION_CLASSPATH =
-      new String[] {
-        "$HADOOP_CONF_DIR",
-        "$HADOOP_COMMON_HOME/share/hadoop/common/*",
-        "$HADOOP_COMMON_HOME/share/hadoop/common/lib/*",
-        "$HADOOP_HDFS_HOME/share/hadoop/hdfs/*",
-        "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*",
-        "$YARN_HOME/share/hadoop/mapreduce/*",
-        "$YARN_HOME/share/hadoop/mapreduce/lib/*"
-      };
-
   /**
    * Environment for Applications.
    *

@@ -508,6 +508,10 @@ public class YarnConfiguration extends Configuration {
   public static final long DEFAULT_NM_PROCESS_KILL_WAIT_MS =
       2000;
 
+  /** Standard Hadoop classes */
+  public static final String YARN_APPLICATION_CLASSPATH = YARN_PREFIX
+      + "application.classpath";
+
   public YarnConfiguration() {
     super();
   }

@@ -482,4 +482,18 @@
     <name>yarn.web-proxy.address</name>
     <value/>
   </property>
+
+  <property>
+    <description>Classpath for typical applications.</description>
+    <name>yarn.application.classpath</name>
+    <value>
+      $HADOOP_CONF_DIR,
+      $HADOOP_COMMON_HOME/share/hadoop/common/*,
+      $HADOOP_COMMON_HOME/share/hadoop/common/lib/*,
+      $HADOOP_HDFS_HOME/share/hadoop/hdfs/*,
+      $HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,
+      $YARN_HOME/share/hadoop/mapreduce/*,
+      $YARN_HOME/share/hadoop/mapreduce/lib/*
+    </value>
+  </property>
 </configuration>
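Note that the default value above is written across several lines, so each comma-separated entry carries leading and trailing whitespace; the per-entry trim() added to MRApps.setMRFrameworkClasspath is what normalizes it. A standalone sketch of that split-and-trim step (the literal value here is illustrative only):

// Illustrative only: mirrors how the comma-separated property value is consumed.
public class ClasspathTrimSketch {
  public static void main(String[] args) {
    String value = "\n      $HADOOP_CONF_DIR,\n      $HADOOP_COMMON_HOME/share/hadoop/common/*\n    ";
    for (String entry : value.split(",")) {
      // trim() strips the newlines and indentation that come from the XML layout
      System.out.println("[" + entry.trim() + "]");
    }
  }
}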