HADOOP-12249. pull argument parsing into a function (aw)

This commit is contained in:
Allen Wittenauer 2015-07-31 14:32:21 -07:00
parent d0e0ba8010
commit 666cafca8d
14 changed files with 297 additions and 159 deletions

View File

@ -234,6 +234,8 @@ Trunk (Unreleased)
HADOOP-10979. Auto-entries in hadoop_usage (aw)
HADOOP-12249. pull argument parsing into a function (aw)
BUG FIXES
HADOOP-11473. test-patch says "-1 overall" even when all checks are +1

View File

@ -17,8 +17,14 @@
MYNAME="${BASH_SOURCE-$0}"
function hadoop_usage()
function hadoop_usage
{
hadoop_add_option "buildpaths" "attempt to add class files from build tree"
hadoop_add_option "hostnames list[,of,host,names]" "hosts to use in slave mode"
hadoop_add_option "loglevel level" "set the log4j level for this command"
hadoop_add_option "hosts filename" "list of hosts to use in slave mode"
hadoop_add_option "slaves" "turn on slave mode"
hadoop_add_subcommand "archive" "create a Hadoop archive"
hadoop_add_subcommand "checknative" "check native Hadoop and compression libraries availability"
hadoop_add_subcommand "classpath" "prints the class path needed to get the Hadoop jar and the required libraries"

View File

@ -53,7 +53,7 @@ if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
fi
# get our functions defined for usage later
if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
[[ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-functions.sh" ]]; then
. "${HADOOP_COMMON_HOME}/libexec/hadoop-functions.sh"
elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-functions.sh" ]]; then
@ -93,75 +93,8 @@ hadoop_bootstrap
# shellcheck disable=SC2034
HADOOP_USER_PARAMS=("$@")
HADOOP_DAEMON_MODE="default"
while [[ -z "${_hadoop_common_done}" ]]; do
case $1 in
--buildpaths)
# shellcheck disable=SC2034
HADOOP_ENABLE_BUILD_PATHS=true
shift
;;
--config)
shift
confdir=$1
shift
if [[ -d "${confdir}" ]]; then
# shellcheck disable=SC2034
HADOOP_CONF_DIR="${confdir}"
elif [[ -z "${confdir}" ]]; then
hadoop_error "ERROR: No parameter provided for --config "
hadoop_exit_with_usage 1
else
hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
hadoop_exit_with_usage 1
fi
;;
--daemon)
shift
HADOOP_DAEMON_MODE=$1
shift
if [[ -z "${HADOOP_DAEMON_MODE}" || \
! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
hadoop_exit_with_usage 1
fi
;;
--debug)
shift
# shellcheck disable=SC2034
HADOOP_SHELL_SCRIPT_DEBUG=true
;;
--help|-help|-h|help|--h|--\?|-\?|\?)
hadoop_exit_with_usage 0
;;
--hostnames)
shift
# shellcheck disable=SC2034
HADOOP_SLAVE_NAMES="$1"
shift
;;
--hosts)
shift
hadoop_populate_slaves_file "$1"
shift
;;
--loglevel)
shift
# shellcheck disable=SC2034
HADOOP_LOGLEVEL="$1"
shift
;;
--slaves)
shift
# shellcheck disable=SC2034
HADOOP_SLAVE_MODE=true
;;
*)
_hadoop_common_done=true
;;
esac
done
hadoop_parse_args "$@"
shift "${HADOOP_PARSE_COUNTER}"
#
# Setup the base-line environment

View File

@ -16,7 +16,8 @@
# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_USAGE=()
declare -a HADOOP_SUBCMD_USAGE
declare -a HADOOP_OPTION_USAGE
## @description Print a message to stderr
## @audience public
@ -48,53 +49,72 @@ function hadoop_debug
## @param subcommanddesc
function hadoop_add_subcommand
{
  local subcmd=$1
  local text=$2

  # entries are stored as "name@description"; the column printer
  # splits on the '@' when rendering usage output
  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${text}"
  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
}
## @description  Add an option to the usage output
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        option
## @param        optiondesc
function hadoop_add_option
{
  local option=$1
  local text=$2

  # same "name@description" encoding as the subcommand table
  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
}
## @description  Reset the usage information to blank
## @audience     private
## @stability    evolving
## @replaceable  no
function hadoop_reset_usage
{
  # empty each usage table and rewind its insertion counter
  HADOOP_SUBCMD_USAGE_COUNTER=0
  HADOOP_SUBCMD_USAGE=()
  HADOOP_OPTION_USAGE_COUNTER=0
  HADOOP_OPTION_USAGE=()
}
## @description Print a screen-size aware two-column output
## @audience private
## @stability evolving
## @replaceable no
## @param array
function hadoop_generic_columnprinter
{
declare -a input=("$@")
declare -i i=0
declare -i counter=0
declare line
declare text
declare option
declare giventext
declare -i maxoptsize
declare -i foldsize
declare -a tmpa
declare numcols
cmd=${cmd##*/}
echo "Usage: ${cmd} [OPTIONS] SUBCOMMAND [SUBCOMMAND OPTIONS]"
if [[ ${takesclass} = true ]]; then
echo " or ${cmd} [OPTIONS] CLASSNAME [CLASSNAME OPTIONS]"
echo " where CLASSNAME is a user-provided Java class"
if [[ -n "${COLUMNS}" ]]; then
numcols=${COLUMNS}
else
numcols=$(tput cols) 2>/dev/null
fi
if [[ -z "${numcols}"
|| ! "${numcols}" =~ ^[0-9]+$ ]]; then
numcols=75
else
((numcols=numcols-5))
fi
echo ""
echo " OPTIONS is none or any of:"
echo " --config confdir"
echo " --daemon (start|stop|status)"
echo " --debug"
echo " --hostnames list[,of,host,names]"
echo " --hosts filename"
echo " --loglevel loglevel"
echo " --slaves"
echo ""
echo " SUBCOMMAND is one of:"
counter=0
while read -r line; do
tmpa[${counter}]=${line}
((counter=counter+1))
@ -102,12 +122,12 @@ function hadoop_generate_usage
if [[ ${#option} -gt ${maxoptsize} ]]; then
maxoptsize=${#option}
fi
done < <(for i in "${HADOOP_USAGE[@]}"; do
echo "${i}"
done < <(for text in "${input[@]}"; do
echo "${text}"
done | sort)
i=0
((foldsize=75-maxoptsize))
((foldsize=numcols-maxoptsize))
until [[ $i -eq ${#tmpa[@]} ]]; do
option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
@ -119,8 +139,63 @@ function hadoop_generate_usage
done < <(echo "${giventext}"| fold -s -w ${foldsize})
((i=i+1))
done
echo ""
echo "Most subcommands print help when invoked w/o parameters or with -h."
}
## @description  generate standard usage output
## @description  and optionally takes a class
## @audience     private
## @stability    evolving
## @replaceable  no
## @param        execname
## @param        true|false
## @param        [text to use in place of SUBCOMMAND]
function hadoop_generate_usage
{
  local progname=${1##*/}
  local takesclass=$2
  local subcmdlabel=${3:-"SUBCOMMAND"}
  local showopts=false
  local showsubs=false
  local optsegment=""
  local subsegment=""

  # only advertise the [OPTIONS] / SUBCOMMAND segments when the
  # corresponding usage tables actually contain entries
  if (( ${HADOOP_OPTION_USAGE_COUNTER:-0} > 0 )); then
    showopts=true
    optsegment=" [OPTIONS]"
  fi

  if (( ${HADOOP_SUBCMD_USAGE_COUNTER:-0} > 0 )); then
    showsubs=true
    subsegment=" ${subcmdlabel} [${subcmdlabel} OPTIONS]"
  fi

  echo "Usage: ${progname}${optsegment}${subsegment}"
  if [[ "${takesclass}" = true ]]; then
    echo " or ${progname}${optsegment} CLASSNAME [CLASSNAME OPTIONS]"
    echo " where CLASSNAME is a user-provided Java class"
  fi

  if [[ "${showopts}" = true ]]; then
    echo ""
    echo " OPTIONS is none or any of:"
    echo ""
    hadoop_generic_columnprinter "${HADOOP_OPTION_USAGE[@]}"
  fi

  if [[ "${showsubs}" = true ]]; then
    echo ""
    echo " ${subcmdlabel} is one of:"
    echo ""
    hadoop_generic_columnprinter "${HADOOP_SUBCMD_USAGE[@]}"
    echo ""
    echo "${subcmdlabel} may print help when invoked w/o parameters or with -h."
  fi
}
## @description Replace `oldvar` with `newvar` if `oldvar` exists.
@ -189,7 +264,7 @@ function hadoop_bootstrap
TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}
# usage output set to zero
HADOOP_USAGE_COUNTER=0
hadoop_reset_usage
export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
@ -1730,3 +1805,101 @@ function hadoop_do_classpath_subcommand
exit 0
fi
}
## @description  generic shell script option parser.  sets
## @description  HADOOP_PARSE_COUNTER to the number of arguments
## @description  the caller should shift away
## @audience     private
## @stability    evolving
## @replaceable  yes
## @param        [parameters, typically "$@"]
function hadoop_parse_args
{
  HADOOP_DAEMON_MODE="default"
  HADOOP_PARSE_COUNTER=0

  # not all of the options supported here are supported by all commands
  # however these are:
  hadoop_add_option "--config dir" "Hadoop config directory"
  hadoop_add_option "--debug" "turn on shell script debug mode"
  hadoop_add_option "--help" "usage information"

  while true; do
    hadoop_debug "hadoop_parse_args: processing $1"
    case $1 in
      --buildpaths)
        # shellcheck disable=SC2034
        HADOOP_ENABLE_BUILD_PATHS=true
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --config)
        shift
        confdir=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -d "${confdir}" ]]; then
          # shellcheck disable=SC2034
          HADOOP_CONF_DIR="${confdir}"
        elif [[ -z "${confdir}" ]]; then
          hadoop_error "ERROR: No parameter provided for --config "
          hadoop_exit_with_usage 1
        else
          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
          hadoop_exit_with_usage 1
        fi
      ;;
      --daemon)
        shift
        HADOOP_DAEMON_MODE=$1
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
        if [[ -z "${HADOOP_DAEMON_MODE}" || \
          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
          hadoop_exit_with_usage 1
        fi
      ;;
      --debug)
        shift
        # shellcheck disable=SC2034
        HADOOP_SHELL_SCRIPT_DEBUG=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      --help|-help|-h|help|--h|--\?|-\?|\?)
        hadoop_exit_with_usage 0
      ;;
      --hostnames)
        shift
        # shellcheck disable=SC2034
        HADOOP_SLAVE_NAMES="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --hosts)
        shift
        hadoop_populate_slaves_file "$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --loglevel)
        shift
        # shellcheck disable=SC2034
        HADOOP_LOGLEVEL="$1"
        shift
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
      ;;
      --slaves)
        shift
        # shellcheck disable=SC2034
        HADOOP_SLAVE_MODE=true
        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
      ;;
      *)
        # first argument we do not recognize: stop and leave it
        # (and everything after it) for the caller
        break
      ;;
    esac
  done

  hadoop_debug "hadoop_parse_args: asking caller to skip ${HADOOP_PARSE_COUNTER}"
}

View File

@ -27,7 +27,8 @@
# HADOOP_SSH_OPTS Options passed to ssh when running remote commands.
##
# print usage for slaves.sh to stdout
function hadoop_usage
{
  echo "Usage: slaves.sh [--config confdir] command..."
}

29
hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh Normal file → Executable file
View File

@ -13,28 +13,27 @@
# limitations under the License.
#
MYNAME="${BASH_SOURCE-$0}"

# register kms.sh subcommands and print the standard usage screen
function hadoop_usage
{
  hadoop_add_subcommand "run" "Start kms in the current window"
  hadoop_add_subcommand "run -security" "Start in the current window with security manager"
  hadoop_add_subcommand "start" "Start kms in a separate window"
  hadoop_add_subcommand "start -security" "Start in a separate window with security manager"
  hadoop_add_subcommand "status" "Return the LSB compliant status"
  hadoop_add_subcommand "stop" "Stop kms, waiting up to 5 seconds for the process to end"
  hadoop_add_subcommand "stop n" "Stop kms, waiting up to n seconds for the process to end"
  hadoop_add_subcommand "stop -force" "Stop kms, wait up to 5 seconds and then use kill -KILL if still running"
  hadoop_add_subcommand "stop n -force" "Stop kms, wait up to n seconds and then use kill -KILL if still running"
  hadoop_generate_usage "${MYNAME}" false
}
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then
DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
else
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi

View File

@ -13,28 +13,27 @@
# limitations under the License.
#
MYNAME="${BASH_SOURCE-$0}"

# register httpfs.sh subcommands and print the standard usage screen
function hadoop_usage
{
  hadoop_add_subcommand "run" "Start httpfs in the current window"
  hadoop_add_subcommand "run -security" "Start in the current window with security manager"
  hadoop_add_subcommand "start" "Start httpfs in a separate window"
  hadoop_add_subcommand "start -security" "Start in a separate window with security manager"
  hadoop_add_subcommand "status" "Return the LSB compliant status"
  hadoop_add_subcommand "stop" "Stop httpfs, waiting up to 5 seconds for the process to end"
  hadoop_add_subcommand "stop n" "Stop httpfs, waiting up to n seconds for the process to end"
  hadoop_add_subcommand "stop -force" "Stop httpfs, wait up to 5 seconds and then use kill -KILL if still running"
  hadoop_add_subcommand "stop n -force" "Stop httpfs, wait up to n seconds and then use kill -KILL if still running"
  hadoop_generate_usage "${MYNAME}" false
}
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then
DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
else
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi

View File

@ -19,6 +19,13 @@ MYNAME="${BASH_SOURCE-$0}"
function hadoop_usage
{
hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
hadoop_add_option "--daemon (start|status|stop)" "operate on a daemon"
hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in slave mode"
hadoop_add_option "--loglevel level" "set the log4j level for this command"
hadoop_add_option "--hosts filename" "list of hosts to use in slave mode"
hadoop_add_option "--slaves" "turn on slave mode"
hadoop_add_subcommand "balancer" "run a cluster balancing utility"
hadoop_add_subcommand "cacheadmin" "configure the HDFS cache"
hadoop_add_subcommand "classpath" "prints the class path needed to get the hadoop jar and the required libraries"
@ -47,8 +54,7 @@ function hadoop_usage
hadoop_add_subcommand "storagepolicies" "list/get/set block storage policies"
hadoop_add_subcommand "version" "print the version"
hadoop_add_subcommand "zkfc" "run the ZK Failover Controller daemon"
hadoop_generate_usage "${MYNAME}"
hadoop_generate_usage "${MYNAME}" false
}
# let's locate libexec...

View File

@ -15,13 +15,19 @@
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
# register start-balancer.sh options and print the standard usage screen
function hadoop_usage
{
  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "--loglevel level" "set the log4j level for this command"
  hadoop_add_option "-policy <policy>" "set the balancer's policy"
  hadoop_add_option "-threshold <threshold>" "set the threshold for balancing"
  hadoop_generate_usage "${MYNAME}" false
}
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then

View File

@ -15,13 +15,17 @@
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
# register stop-balancer.sh options and print the standard usage screen
function hadoop_usage
{
  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
  hadoop_add_option "--loglevel level" "set the log4j level for this command"
  hadoop_generate_usage "${MYNAME}" false
}
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then

View File

@ -29,7 +29,7 @@ function hadoop_usage
hadoop_add_subcommand "queue" "get information regarding JobQueues"
hadoop_add_subcommand "sampler" "sampler"
hadoop_add_subcommand "version" "print the version"
hadoop_generate_usage "${MYNAME}"
hadoop_generate_usage "${MYNAME}" true
}
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)

7
hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh Normal file → Executable file
View File

@ -16,13 +16,14 @@
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
# print the standard usage screen for start-yarn.sh
function hadoop_usage
{
  hadoop_generate_usage "${MYNAME}" false
}
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then

7
hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh Normal file → Executable file
View File

@ -16,13 +16,14 @@
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
# print the standard usage screen for stop-yarn.sh
function hadoop_usage
{
  hadoop_generate_usage "${MYNAME}" false
}
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then

View File

@ -19,6 +19,13 @@ MYNAME="${BASH_SOURCE-$0}"
function hadoop_usage
{
hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
hadoop_add_option "--daemon (start|status|stop)" "operate on a daemon"
hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in slave mode"
hadoop_add_option "--loglevel level" "set the log4j level for this command"
hadoop_add_option "--hosts filename" "list of hosts to use in slave mode"
hadoop_add_option "--slaves" "turn on slave mode"
hadoop_add_subcommand "application" "prints application(s) report/kill application"
hadoop_add_subcommand "applicationattempt" "prints applicationattempt(s) report"
hadoop_add_subcommand "classpath" "prints the class path needed to get the hadoop jar and the required libraries"