From 8912aac81e37a8f49fc21e0e367cd8cb96eea583 Mon Sep 17 00:00:00 2001
From: Arun Murthy
Date: Tue, 25 Sep 2012 23:37:32 +0000
Subject: [PATCH] YARN-9. Rename YARN_HOME to HADOOP_YARN_HOME. Contributed by Vinod K V.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1390218 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-yarn-project/CHANGES.txt               |  2 +
 hadoop-yarn-project/hadoop-yarn/bin/slaves.sh |  2 +-
 hadoop-yarn-project/hadoop-yarn/bin/yarn      | 50 +++++++++----------
 .../hadoop-yarn/bin/yarn-config.sh            |  2 +-
 .../hadoop-yarn/bin/yarn-daemon.sh            | 10 ++--
 .../hadoop-yarn/bin/yarn-daemons.sh           |  2 +-
 .../hadoop-yarn/conf/yarn-env.sh              |  4 +-
 .../hadoop/yarn/api/ApplicationConstants.java |  4 +-
 .../hadoop/yarn/conf/YarnConfiguration.java   | 29 ++++++++---
 .../src/main/resources/yarn-default.xml       |  4 +-
 .../nodemanager/LinuxContainerExecutor.java   |  5 +-
 .../src/site/apt/CapacityScheduler.apt.vm     |  2 +-
 .../src/site/apt/ClusterSetup.apt.vm          | 26 +++++-----
 .../src/site/apt/SingleCluster.apt.vm         |  4 +-
 14 files changed, 83 insertions(+), 63 deletions(-)

diff --git a/hadoop-yarn-project/CHANGES.txt b/hadoop-yarn-project/CHANGES.txt
index 71a85e15102..e840ec17c4d 100644
--- a/hadoop-yarn-project/CHANGES.txt
+++ b/hadoop-yarn-project/CHANGES.txt
@@ -48,6 +48,8 @@ Release 2.0.2-alpha - 2012-09-07
 
   INCOMPATIBLE CHANGES
 
+    YARN-9. Rename YARN_HOME to HADOOP_YARN_HOME. (vinodkv via acmurthy)
+
   NEW FEATURES
 
     YARN-1. Promote YARN to be a sub-project of Apache Hadoop. (acmurthy)
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/slaves.sh b/hadoop-yarn-project/hadoop-yarn/bin/slaves.sh
index ee254603d6d..9b783b4f32e 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/slaves.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/slaves.sh
@@ -22,7 +22,7 @@
 #
 #   YARN_SLAVES    File naming remote hosts.
 #     Default is ${YARN_CONF_DIR}/slaves.
-#   YARN_CONF_DIR  Alternate conf dir. Default is ${YARN_HOME}/conf.
+#   YARN_CONF_DIR  Alternate conf dir. Default is ${HADOOP_YARN_HOME}/conf.
 #   YARN_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
 #   YARN_SSH_OPTS Options passed to ssh when running remote commands.
 ##
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index 01687b0f710..b376fd13abf 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -41,7 +41,7 @@
 #   more than one command (fs, dfs, fsck,
 #   dfsadmin etc)
 #
-#   YARN_CONF_DIR  Alternate conf dir. Default is ${YARN_HOME}/conf.
+#   YARN_CONF_DIR  Alternate conf dir. Default is ${HADOOP_YARN_HOME}/conf.
 #
 #   YARN_ROOT_LOGGER The root appender. Default is INFO,console
 #
@@ -116,43 +116,43 @@ fi
 
 CLASSPATH="${HADOOP_CONF_DIR}:${YARN_CONF_DIR}:${CLASSPATH}"
 
 # for developers, add Hadoop classes to CLASSPATH
-if [ -d "$YARN_HOME/yarn-api/target/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/yarn-api/target/classes
+if [ -d "$HADOOP_YARN_HOME/yarn-api/target/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-api/target/classes
 fi
-if [ -d "$YARN_HOME/yarn-common/target/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/yarn-common/target/classes
+if [ -d "$HADOOP_YARN_HOME/yarn-common/target/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-common/target/classes
 fi
-if [ -d "$YARN_HOME/yarn-mapreduce/target/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/yarn-mapreduce/target/classes
+if [ -d "$HADOOP_YARN_HOME/yarn-mapreduce/target/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-mapreduce/target/classes
 fi
-if [ -d "$YARN_HOME/yarn-master-worker/target/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/yarn-master-worker/target/classes
+if [ -d "$HADOOP_YARN_HOME/yarn-master-worker/target/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-master-worker/target/classes
 fi
-if [ -d "$YARN_HOME/yarn-server/yarn-server-nodemanager/target/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/yarn-server/yarn-server-nodemanager/target/classes
+if [ -d "$HADOOP_YARN_HOME/yarn-server/yarn-server-nodemanager/target/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-server/yarn-server-nodemanager/target/classes
 fi
-if [ -d "$YARN_HOME/yarn-server/yarn-server-common/target/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/yarn-server/yarn-server-common/target/classes
+if [ -d "$HADOOP_YARN_HOME/yarn-server/yarn-server-common/target/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-server/yarn-server-common/target/classes
 fi
-if [ -d "$YARN_HOME/yarn-server/yarn-server-resourcemanager/target/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/yarn-server/yarn-server-resourcemanager/target/classes
+if [ -d "$HADOOP_YARN_HOME/yarn-server/yarn-server-resourcemanager/target/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/yarn-server/yarn-server-resourcemanager/target/classes
 fi
-if [ -d "$YARN_HOME/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/target/test/classes
+if [ -d "$HADOOP_YARN_HOME/build/test/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/target/test/classes
 fi
-if [ -d "$YARN_HOME/build/tools" ]; then
-  CLASSPATH=${CLASSPATH}:$YARN_HOME/build/tools
+if [ -d "$HADOOP_YARN_HOME/build/tools" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/build/tools
 fi
-CLASSPATH=${CLASSPATH}:$YARN_HOME/${YARN_DIR}/*
-CLASSPATH=${CLASSPATH}:$YARN_HOME/${YARN_LIB_JARS_DIR}/*
+CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/${YARN_DIR}/*
+CLASSPATH=${CLASSPATH}:$HADOOP_YARN_HOME/${YARN_LIB_JARS_DIR}/*
 
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
 # default log directory & file
 if [ "$YARN_LOG_DIR" = "" ]; then
-  YARN_LOG_DIR="$YARN_HOME/logs"
+  YARN_LOG_DIR="$HADOOP_YARN_HOME/logs"
 fi
 if [ "$YARN_LOGFILE" = "" ]; then
   YARN_LOGFILE='yarn.log'
@@ -210,7 +210,7 @@ fi
 # cygwin path translation
 if $cygwin; then
   CLASSPATH=`cygpath -p -w "$CLASSPATH"`
-  YARN_HOME=`cygpath -w "$YARN_HOME"`
+  HADOOP_YARN_HOME=`cygpath -w "$HADOOP_YARN_HOME"`
   YARN_LOG_DIR=`cygpath -w "$YARN_LOG_DIR"`
   TOOL_PATH=`cygpath -p -w "$TOOL_PATH"`
 fi
@@ -224,8 +224,8 @@ YARN_OPTS="$YARN_OPTS -Dhadoop.log.dir=$YARN_LOG_DIR"
 YARN_OPTS="$YARN_OPTS -Dyarn.log.dir=$YARN_LOG_DIR"
 YARN_OPTS="$YARN_OPTS -Dhadoop.log.file=$YARN_LOGFILE"
 YARN_OPTS="$YARN_OPTS -Dyarn.log.file=$YARN_LOGFILE"
-YARN_OPTS="$YARN_OPTS -Dyarn.home.dir=$YARN_HOME"
-YARN_OPTS="$YARN_OPTS -Dhadoop.home.dir=$YARN_HOME"
+YARN_OPTS="$YARN_OPTS -Dyarn.home.dir=$HADOOP_YARN_HOME"
+YARN_OPTS="$YARN_OPTS -Dhadoop.home.dir=$HADOOP_YARN_HOME"
 YARN_OPTS="$YARN_OPTS -Dhadoop.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
 YARN_OPTS="$YARN_OPTS -Dyarn.root.logger=${YARN_ROOT_LOGGER:-INFO,console}"
 if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh
index 275869f7365..3d67801efec 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-config.sh
@@ -49,7 +49,7 @@ then
 fi
 
 # Allow alternate conf dir location.
-export YARN_CONF_DIR="${HADOOP_CONF_DIR:-$YARN_HOME/conf}"
+export YARN_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_YARN_HOME/conf}"
 
 #check to see it is specified whether to use the slaves or the
 # masters file
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh
index 07326a166bc..2df1044670f 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemon.sh
@@ -20,7 +20,7 @@
 #
 # Environment Variables
 #
-#   YARN_CONF_DIR  Alternate conf dir. Default is ${YARN_HOME}/conf.
+#   YARN_CONF_DIR  Alternate conf dir. Default is ${HADOOP_YARN_HOME}/conf.
 #   YARN_LOG_DIR   Where log files are stored. PWD by default.
 #   YARN_MASTER    host:path where hadoop code should be rsync'd from
 #   YARN_PID_DIR   The pid files are stored. /tmp by default.
@@ -76,7 +76,7 @@ fi
 
 # get log directory
 if [ "$YARN_LOG_DIR" = "" ]; then
-  export YARN_LOG_DIR="$YARN_HOME/logs"
+  export YARN_LOG_DIR="$HADOOP_YARN_HOME/logs"
 fi
 
 if [ ! -w "$YARN_LOG_DIR" ] ; then
@@ -115,13 +115,13 @@ case $startStop in
 
     if [ "$YARN_MASTER" != "" ]; then
       echo rsync from $YARN_MASTER
-      rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $YARN_MASTER/ "$YARN_HOME"
+      rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $YARN_MASTER/ "$HADOOP_YARN_HOME"
     fi
 
     hadoop_rotate_log $log
    echo starting $command, logging to $log
-    cd "$YARN_HOME"
-    nohup nice -n $YARN_NICENESS "$YARN_HOME"/bin/yarn --config $YARN_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
+    cd "$HADOOP_YARN_HOME"
+    nohup nice -n $YARN_NICENESS "$HADOOP_YARN_HOME"/bin/yarn --config $YARN_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
     echo $! > $pid
     sleep 1; head "$log"
     ;;
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh
index aafb42b9b17..a7858e49690 100644
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn-daemons.sh
@@ -34,5 +34,5 @@
 DEFAULT_LIBEXEC_DIR="$bin"/../libexec
 HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
 . $HADOOP_LIBEXEC_DIR/yarn-config.sh
 
-exec "$bin/slaves.sh" --config $YARN_CONF_DIR cd "$YARN_HOME" \; "$bin/yarn-daemon.sh" --config $YARN_CONF_DIR "$@"
+exec "$bin/slaves.sh" --config $YARN_CONF_DIR cd "$HADOOP_YARN_HOME" \; "$bin/yarn-daemon.sh" --config $YARN_CONF_DIR "$@"
diff --git a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
index 2ccb38e3f64..2a40f10550d 100644
--- a/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
+++ b/hadoop-yarn-project/hadoop-yarn/conf/yarn-env.sh
@@ -17,7 +17,7 @@
 export HADOOP_YARN_USER=${HADOOP_YARN_USER:-yarn}
 
 # resolve links - $0 may be a softlink
-export YARN_CONF_DIR="${YARN_CONF_DIR:-$YARN_HOME/conf}"
+export YARN_CONF_DIR="${YARN_CONF_DIR:-$HADOOP_YARN_HOME/conf}"
 
 # some Java parameters
 # export JAVA_HOME=/home/y/libexec/jdk1.6.0/
@@ -47,7 +47,7 @@ IFS=
 
 # default log directory & file
 if [ "$YARN_LOG_DIR" = "" ]; then
-  YARN_LOG_DIR="$YARN_HOME/logs"
+  YARN_LOG_DIR="$HADOOP_YARN_HOME/logs"
 fi
 if [ "$YARN_LOGFILE" = "" ]; then
   YARN_LOGFILE='yarn.log'
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
index ec2798315c5..60065fbcf1b 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
@@ -169,9 +169,9 @@ public enum Environment {
     MALLOC_ARENA_MAX("MALLOC_ARENA_MAX"),
 
     /**
-     * $YARN_HOME
+     * $HADOOP_YARN_HOME
      */
-    YARN_HOME("YARN_HOME");
+    HADOOP_YARN_HOME("HADOOP_YARN_HOME");
 
     private final String variable;
     private Environment(String variable) {
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
index 16de8042d8d..90b9abbf0cc 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
@@ -21,9 +21,12 @@
 import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.UnknownHostException;
+import java.util.Arrays;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
 
 import com.google.common.base.Joiner;
 import com.google.common.base.Splitter;
@@ -281,7 +284,12 @@ public class YarnConfiguration extends Configuration {
   /** Environment variables that containers may override rather than use NodeManager's default.*/
   public static final String NM_ENV_WHITELIST = NM_PREFIX + "env-whitelist";
-  public static final String DEFAULT_NM_ENV_WHITELIST = "JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,YARN_HOME";
+  public static final String DEFAULT_NM_ENV_WHITELIST = StringUtils.join(",",
+      Arrays.asList(ApplicationConstants.Environment.JAVA_HOME.key(),
+          ApplicationConstants.Environment.HADOOP_COMMON_HOME.key(),
+          ApplicationConstants.Environment.HADOOP_HDFS_HOME.key(),
+          ApplicationConstants.Environment.HADOOP_CONF_DIR.key(),
+          ApplicationConstants.Environment.HADOOP_YARN_HOME.key()));
 
   /** address of node manager IPC.*/
   public static final String NM_ADDRESS = NM_PREFIX + "address";
 
@@ -570,12 +578,19 @@ public class YarnConfiguration extends Configuration {
    * CLASSPATH entries
    */
   public static final String[] DEFAULT_YARN_APPLICATION_CLASSPATH = {
-      "$HADOOP_CONF_DIR", "$HADOOP_COMMON_HOME/share/hadoop/common/*",
-      "$HADOOP_COMMON_HOME/share/hadoop/common/lib/*",
-      "$HADOOP_HDFS_HOME/share/hadoop/hdfs/*",
-      "$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*",
-      "$YARN_HOME/share/hadoop/yarn/*",
-      "$YARN_HOME/share/hadoop/yarn/lib/*"};
+      ApplicationConstants.Environment.HADOOP_CONF_DIR.$(),
+      ApplicationConstants.Environment.HADOOP_COMMON_HOME.$()
+          + "/share/hadoop/common/*",
+      ApplicationConstants.Environment.HADOOP_COMMON_HOME.$()
+          + "/share/hadoop/common/lib/*",
+      ApplicationConstants.Environment.HADOOP_HDFS_HOME.$()
+          + "/share/hadoop/hdfs/*",
+      ApplicationConstants.Environment.HADOOP_HDFS_HOME.$()
+          + "/share/hadoop/hdfs/lib/*",
+      ApplicationConstants.Environment.HADOOP_YARN_HOME.$()
+          + "/share/hadoop/yarn/*",
+      ApplicationConstants.Environment.HADOOP_YARN_HOME.$()
+          + "/share/hadoop/yarn/lib/*" };
 
   /** Container temp directory */
   public static final String DEFAULT_CONTAINER_TEMP_DIR = "./tmp";
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
index 1d0873bfa43..818933e69c8 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/resources/yarn-default.xml
@@ -266,7 +266,7 @@
   <property>
     <description>Environment variables that containers may override rather than use NodeManager's default.</description>
     <name>yarn.nodemanager.env-whitelist</name>
-    <value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,YARN_HOME</value>
+    <value>JAVA_HOME,HADOOP_COMMON_HOME,HADOOP_HDFS_HOME,HADOOP_CONF_DIR,HADOOP_YARN_HOME</value>
   </property>
 
@@ -561,7 +561,7 @@
 
   <property>
    <description>CLASSPATH for YARN applications. A comma-separated list
    of CLASSPATH entries</description>
    <name>yarn.application.classpath</name>
-    <value>$HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/share/hadoop/common/*,$HADOOP_COMMON_HOME/share/hadoop/common/lib/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,$YARN_HOME/share/hadoop/yarn/*,$YARN_HOME/share/hadoop/yarn/lib/*</value>
+    <value>$HADOOP_CONF_DIR,$HADOOP_COMMON_HOME/share/hadoop/common/*,$HADOOP_COMMON_HOME/share/hadoop/common/lib/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/*,$HADOOP_HDFS_HOME/share/hadoop/hdfs/lib/*,$HADOOP_YARN_HOME/share/hadoop/yarn/*,$HADOOP_YARN_HOME/share/hadoop/yarn/lib/*</value>
   </property>
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
index be515377637..c18a0c93dd9 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/LinuxContainerExecutor.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.util.Shell.ExitCodeException;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
@@ -92,7 +93,9 @@ int getValue() {
   }
 
   protected String getContainerExecutorExecutablePath(Configuration conf) {
-    File hadoopBin = new File(System.getenv("YARN_HOME"), "bin");
+    String yarnHomeEnvVar =
+        System.getenv(ApplicationConstants.Environment.HADOOP_YARN_HOME.key());
+    File hadoopBin = new File(yarnHomeEnvVar, "bin");
     String defaultPath =
       new File(hadoopBin, "container-executor").getAbsolutePath();
     return null == conf
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/CapacityScheduler.apt.vm b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/CapacityScheduler.apt.vm
index 0f57f339080..dbd213ac173 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/CapacityScheduler.apt.vm
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/CapacityScheduler.apt.vm
@@ -318,7 +318,7 @@ Hadoop MapReduce Next Generation - Capacity Scheduler
 
 ----
 $ vi $HADOOP_CONF_DIR/capacity-scheduler.xml
-$ $YARN_HOME/bin/yarn rmadmin -refreshQueues
+$ $HADOOP_YARN_HOME/bin/yarn rmadmin -refreshQueues
 ----
 
   Queues cannot be <deleted>, only addition of new queues is supported -
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm
index 673125506f5..c0389377465 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/ClusterSetup.apt.vm
@@ -497,20 +497,20 @@ Hadoop MapReduce Next Generation - Cluster Setup
   ResourceManager:
 
 ----
-  $ $YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start resourcemanager
+  $ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start resourcemanager
 ----
 
   Run a script to start NodeManagers on all slaves:
 
 ----
-  $ $YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start nodemanager
+  $ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start nodemanager
 ----
 
   Start a standalone WebAppProxy server. If multiple servers
   are used with load balancing it should be run on each of them:
 
 ----
-  $ $YARN_HOME/bin/yarn start proxyserver --config $HADOOP_CONF_DIR
+  $ $HADOOP_YARN_HOME/bin/yarn start proxyserver --config $HADOOP_CONF_DIR
 ----
 
   Start the MapReduce JobHistory Server with the following command, run on the
@@ -539,20 +539,20 @@ Hadoop MapReduce Next Generation - Cluster Setup
   ResourceManager:
 
 ----
-  $ $YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop resourcemanager
+  $ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop resourcemanager
 ----
 
   Run a script to stop NodeManagers on all slaves:
 
 ----
-  $ $YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop nodemanager
+  $ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop nodemanager
 ----
 
   Stop the WebAppProxy server. If multiple servers are used with load
   balancing it should be run on each of them:
 
 ----
-  $ $YARN_HOME/bin/yarn stop proxyserver --config $HADOOP_CONF_DIR
+  $ $HADOOP_YARN_HOME/bin/yarn stop proxyserver --config $HADOOP_CONF_DIR
 ----
 
@@ -883,7 +883,7 @@ KVNO Timestamp Principal
   The path passed in <<<-Dcontainer-executor.conf.dir>>> should be the
   path on the cluster nodes where a configuration file for the setuid
   executable should be located. The executable should be installed in
-  $YARN_HOME/bin.
+  $HADOOP_YARN_HOME/bin.
 
   The executable must have specific permissions: 6050 or --Sr-s---
   permissions user-owned by <root> (super-user) and group-owned by a
@@ -1040,13 +1040,13 @@ KVNO Timestamp Principal
   ResourceManager as <yarn>:
 
 ----
-[yarn]$ $YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start resourcemanager
+[yarn]$ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start resourcemanager
 ----
 
   Run a script to start NodeManagers on all slaves as <yarn>:
 
 ----
-[yarn]$ $YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start nodemanager
+[yarn]$ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR start nodemanager
 ----
 
   Start a standalone WebAppProxy server. Run on the WebAppProxy
@@ -1054,7 +1054,7 @@ KVNO Timestamp Principal
   it should be run on each of them:
 
 ----
-[yarn]$ $YARN_HOME/bin/yarn start proxyserver --config $HADOOP_CONF_DIR
+[yarn]$ $HADOOP_YARN_HOME/bin/yarn start proxyserver --config $HADOOP_CONF_DIR
 ----
 
   Start the MapReduce JobHistory Server with the following command, run on the
@@ -1083,13 +1083,13 @@ KVNO Timestamp Principal
   ResourceManager as <yarn>:
 
 ----
-[yarn]$ $YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop resourcemanager
+[yarn]$ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop resourcemanager
 ----
 
   Run a script to stop NodeManagers on all slaves as <yarn>:
 
 ----
-[yarn]$ $YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop nodemanager
+[yarn]$ $HADOOP_YARN_HOME/sbin/yarn-daemon.sh --config $HADOOP_CONF_DIR stop nodemanager
 ----
 
   Stop the WebAppProxy server. Run on the WebAppProxy server as
@@ -1097,7 +1097,7 @@ KVNO Timestamp Principal
   should be run on each of them:
 
 ----
-[yarn]$ $YARN_HOME/bin/yarn stop proxyserver --config $HADOOP_CONF_DIR
+[yarn]$ $HADOOP_YARN_HOME/bin/yarn stop proxyserver --config $HADOOP_CONF_DIR
 ----
 
   Stop the MapReduce JobHistory Server with the following command, run on the
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/SingleCluster.apt.vm b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/SingleCluster.apt.vm
index f4ea1fe69c7..0cec916039c 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/SingleCluster.apt.vm
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/SingleCluster.apt.vm
@@ -43,7 +43,7 @@ $ mvn clean install assembly:assembly -Pnative
   Assuming you have installed hadoop-common/hadoop-hdfs and exported
   <<$HADOOP_COMMON_HOME>>/<<$HADOOP_HDFS_HOME>>, untar hadoop mapreduce
   tarball and set environment variable <<$HADOOP_MAPRED_HOME>> to the
-  untarred directory. Set <<$YARN_HOME>> the same as <<$HADOOP_MAPRED_HOME>>.
+  untarred directory. Set <<$HADOOP_YARN_HOME>> the same as <<$HADOOP_MAPRED_HOME>>.
 
   <<NOTE:>> The following instructions assume you have hdfs running.
 
@@ -174,7 +174,7 @@ Add the following configs to your <<<yarn-site.xml>>>
 * Running daemons.
 
   Assuming that the environment variables <<$HADOOP_COMMON_HOME>>, <<$HADOOP_HDFS_HOME>>, <<$HADOO_MAPRED_HOME>>,
-  <<$YARN_HOME>>, <<$JAVA_HOME>> and <<$HADOOP_CONF_DIR>> have been set appropriately.
+  <<$HADOOP_YARN_HOME>>, <<$JAVA_HOME>> and <<$HADOOP_CONF_DIR>> have been set appropriately.
   Set $<<$YARN_CONF_DIR>> the same as $<<$HADOOP_CONF_DIR>>
 
   Run ResourceManager and NodeManager as: