From 452b37a2d7ea6d1ed5756a8f9b705327a0871526 Mon Sep 17 00:00:00 2001
From: Aaron Myers
Date: Fri, 30 May 2014 01:53:36 +0000
Subject: [PATCH] HADOOP-10638. Updating hadoop-daemon.sh to work as expected
 when nfs is started as a privileged user. Contributed by Manikandan
 Narayanaswamy.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1598452 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-common-project/hadoop-common/CHANGES.txt |  3 +++
 .../hadoop-common/src/main/bin/hadoop-daemon.sh | 11 +++++++++++
 2 files changed, 14 insertions(+)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 6df3a5badf4..0c07d433300 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -168,6 +168,9 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10639. FileBasedKeyStoresFactory initialization is not using
     default for SSL_REQUIRE_CLIENT_CERT_KEY. (tucu)
 
+    HADOOP-10638. Updating hadoop-daemon.sh to work as expected when nfs is
+    started as a privileged user. (Manikandan Narayanaswamy via atm)
+
 Release 2.4.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
index ece40eff24f..bb6ed8690ef 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-daemon.sh
@@ -87,6 +87,14 @@ if [ "$command" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_
   starting_secure_dn="true"
 fi
 
+#Determine if we're starting a privileged NFS, if so, redefine the appropriate variables
+if [ "$command" == "nfs3" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_PRIVILEGED_NFS_USER" ]; then
+  export HADOOP_PID_DIR=$HADOOP_PRIVILEGED_NFS_PID_DIR
+  export HADOOP_LOG_DIR=$HADOOP_PRIVILEGED_NFS_LOG_DIR
+  export HADOOP_IDENT_STRING=$HADOOP_PRIVILEGED_NFS_USER
+  starting_privileged_nfs="true"
+fi
+
 if [ "$HADOOP_IDENT_STRING" = "" ]; then
   export HADOOP_IDENT_STRING="$USER"
 fi
@@ -162,6 +170,9 @@ case $startStop in
       echo "ulimit -a for secure datanode user $HADOOP_SECURE_DN_USER" >> $log
       # capture the ulimit info for the appropriate user
       su --shell=/bin/bash $HADOOP_SECURE_DN_USER -c 'ulimit -a' >> $log 2>&1
+    elif [ "true" = "$starting_privileged_nfs" ]; then
+      echo "ulimit -a for privileged nfs user $HADOOP_PRIVILEGED_NFS_USER" >> $log
+      su --shell=/bin/bash $HADOOP_PRIVILEGED_NFS_USER -c 'ulimit -a' >> $log 2>&1
     else
       echo "ulimit -a for user $USER" >> $log
       ulimit -a >> $log 2>&1
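
Usage sketch (not part of the commit): the new branch in hadoop-daemon.sh only fires when the script runs as root (EUID 0) with the command "nfs3" and HADOOP_PRIVILEGED_NFS_USER set. A minimal way to exercise it, assuming the overrides are exported from hadoop-env.sh; the user name and directory paths below are illustrative placeholders, only the variable names come from the patch:

    # hadoop-env.sh -- variable names are the ones the patch reads;
    # the user and paths are placeholders, not shipped defaults.
    export HADOOP_PRIVILEGED_NFS_USER=nfsserver
    export HADOOP_PRIVILEGED_NFS_PID_DIR=/var/run/hadoop-nfs
    export HADOOP_PRIVILEGED_NFS_LOG_DIR=/var/log/hadoop-nfs

    # Invoked as root; hadoop-daemon.sh then swaps HADOOP_PID_DIR,
    # HADOOP_LOG_DIR and HADOOP_IDENT_STRING to the privileged-NFS values
    # and records ulimit -a for that user before launching the daemon.
    hadoop-daemon.sh start nfs3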