HADOOP-10979. Auto-entries in hadoop_usage (aw)

Allen Wittenauer 2015-07-16 16:58:11 -07:00
parent 3540d5fe4b
commit ee36f4f9b8
6 changed files with 186 additions and 127 deletions

hadoop-common-project/hadoop-common/CHANGES.txt

@@ -232,6 +232,8 @@ Trunk (Unreleased)
HADOOP-12149. copy all of test-patch BINDIR prior to re-exec (aw)
HADOOP-10979. Auto-entries in hadoop_usage (aw)
BUG FIXES
HADOOP-11473. test-patch says "-1 overall" even when all checks are +1

hadoop-common-project/hadoop-common/src/main/bin/hadoop

@@ -15,47 +15,35 @@
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
function hadoop_usage()
{
echo "Usage: hadoop [--config confdir] [COMMAND | CLASSNAME]"
echo " CLASSNAME run the class named CLASSNAME"
echo " or"
echo " where COMMAND is one of:"
echo " archive -archiveName NAME -p <parent path> <src>* <dest>"
echo " create a Hadoop archive"
echo " checknative [-a|-h] check native Hadoop and compression "
echo " libraries availability"
echo " classpath prints the class path needed to get the"
echo " Hadoop jar and the required libraries"
echo " conftest validate configuration XML files"
echo " credential interact with credential providers"
echo " daemonlog get/set the log level for each daemon"
echo " distch path:owner:group:permisson"
echo " distributed metadata changer"
echo " distcp <srcurl> <desturl> "
echo " copy file or directories recursively"
echo " fs run a generic filesystem user client"
echo " jar <jar> run a jar file"
echo " note: please use \"yarn jar\" to launch"
echo " YARN applications, not this command."
echo " jnipath prints the java.library.path"
echo " kerbname show auth_to_local principal conversion"
echo " key manage keys via the KeyProvider"
echo " trace view and modify Hadoop tracing settings"
echo " version print the version"
echo ""
echo "Most commands print help when invoked w/o parameters."
hadoop_add_subcommand "archive" "create a Hadoop archive"
hadoop_add_subcommand "checknative" "check native Hadoop and compression libraries availability"
hadoop_add_subcommand "classpath" "prints the class path needed to get the Hadoop jar and the required libraries"
hadoop_add_subcommand "conftest" "validate configuration XML files"
hadoop_add_subcommand "credential" "interact with credential providers"
hadoop_add_subcommand "daemonlog" "get/set the log level for each daemon"
hadoop_add_subcommand "distch" "distributed metadata changer"
hadoop_add_subcommand "distcp" "copy file or directories recursively"
hadoop_add_subcommand "fs" "run a generic filesystem user client"
hadoop_add_subcommand "jar <jar>" "run a jar file. NOTE: please use \"yarn jar\" to launch YARN applications, not this command."
hadoop_add_subcommand "jnipath" "prints the java.library.path"
hadoop_add_subcommand "kerbname" "show auth_to_local principal conversion"
hadoop_add_subcommand "key" "manage keys via the KeyProvider"
hadoop_add_subcommand "trace" "view and modify Hadoop tracing settings"
hadoop_add_subcommand "version" "print the version"
hadoop_generate_usage "${MYNAME}" true
}
# This script runs the hadoop core commands.
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then
DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
else
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
@@ -98,7 +86,7 @@ case ${COMMAND} in
exit 1
fi
;;
#mapred commands for backwards compatibility
pipes|job|queue|mrgroups|mradmin|jobtracker|tasktracker)
hadoop_error "WARNING: Use of this script to execute ${COMMAND} is deprecated."

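A side note on the hunks above: the per-script this=/bin= lookup is replaced by the shared MYNAME variable declared at the top of the file. A minimal standalone sketch of the same directory-resolution idiom (the echo is only for demonstration):

#!/usr/bin/env bash
# Resolve the directory that physically contains this script.
# cd -P and pwd -P follow symlinks, so a symlinked launcher still
# finds the real libexec directory next to the actual file.
MYNAME="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
echo "libexec would resolve to: ${DEFAULT_LIBEXEC_DIR}"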
hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh

@@ -14,6 +14,10 @@
# See the License for the specific language governing permissions and
# limitations under the License.
# we need to declare this globally as an array, which can only
# be done outside of a function
declare -a HADOOP_USAGE=()
## @description Print a message to stderr
## @audience public
## @stability stable
@@ -36,6 +40,89 @@ function hadoop_debug
fi
}
## @description Add a subcommand to the usage output
## @audience private
## @stability evolving
## @replaceable no
## @param subcommand
## @param subcommanddesc
function hadoop_add_subcommand
{
local option=$1
local text=$2
HADOOP_USAGE[${HADOOP_USAGE_COUNTER}]="${option}@${text}"
((HADOOP_USAGE_COUNTER=HADOOP_USAGE_COUNTER+1))
}
## @description generate standard usage output
## @description and optionally takes a class
## @audience private
## @stability evolving
## @replaceable no
## @param execname
## @param [true|false]
function hadoop_generate_usage
{
local cmd=$1
local takesclass=$2
local i
local counter
local line
local option
local giventext
local maxoptsize
local foldsize=75
declare -a tmpa
cmd=${cmd##*/}
echo "Usage: ${cmd} [OPTIONS] SUBCOMMAND [SUBCOMMAND OPTIONS]"
if [[ ${takesclass} = true ]]; then
echo " or ${cmd} [OPTIONS] CLASSNAME [CLASSNAME OPTIONS]"
echo " where CLASSNAME is a user-provided Java class"
fi
echo ""
echo " OPTIONS is none or any of:"
echo " --config confdir"
echo " --daemon (start|stop|status)"
echo " --debug"
echo " --hostnames list[,of,host,names]"
echo " --hosts filename"
echo " --loglevel loglevel"
echo " --slaves"
echo ""
echo " SUBCOMMAND is one of:"
counter=0
while read -r line; do
tmpa[${counter}]=${line}
((counter=counter+1))
option=$(echo "${line}" | cut -f1 -d'@')
if [[ ${#option} -gt ${maxoptsize} ]]; then
maxoptsize=${#option}
fi
done < <(for i in "${HADOOP_USAGE[@]}"; do
echo "${i}"
done | sort)
i=0
((foldsize=75-maxoptsize))
until [[ $i -eq ${#tmpa[@]} ]]; do
option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
giventext=$(echo "${tmpa[$i]}" | cut -f2 -d'@')
while read -r line; do
printf "%-${maxoptsize}s %-s\n" "${option}" "${line}"
option=" "
done < <(echo "${giventext}"| fold -s -w ${foldsize})
((i=i+1))
done
echo ""
echo "Most subcommands print help when invoked w/o parameters or with -h."
}
## @description Replace `oldvar` with `newvar` if `oldvar` exists.
## @audience public
## @stability stable
@@ -101,6 +188,9 @@ function hadoop_bootstrap
# setup a default TOOL_PATH
TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}
# usage output set to zero
HADOOP_USAGE_COUNTER=0
export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
# defaults

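To see the registry technique above in one self-contained piece: entries are recorded as "name@description" strings, sorted, and printed with a computed printf field width, with fold(1) wrapping long descriptions onto aligned continuation lines. The sketch below mirrors that approach; the function and variable names are illustrative, not the Hadoop ones.

#!/usr/bin/env bash
declare -a USAGE=()
counter=0

add_subcommand()
{
  # record one entry as "name@description"
  USAGE[${counter}]="$1@$2"
  ((counter=counter+1))
}

generate_usage()
{
  local line option text max=0
  # pass 1: the widest subcommand name sets the printf field width
  for line in "${USAGE[@]}"; do
    option=${line%%@*}
    if (( ${#option} > max )); then
      max=${#option}
    fi
  done
  # pass 2: sort entries, align names, wrap descriptions with fold(1)
  while read -r line; do
    option=${line%%@*}
    text=${line#*@}
    while read -r wrapped; do
      printf "%-${max}s %s\n" "${option}" "${wrapped}"
      option=" "   # continuation lines get a blank name column
    done < <(echo "${text}" | fold -s -w $((75 - max)))
  done < <(printf '%s\n' "${USAGE[@]}" | sort)
}

add_subcommand "version" "print the version"
add_subcommand "fs" "run a generic filesystem user client"
generate_usage

Run directly, this prints the two registered entries sorted and aligned in one column, with descriptions wrapped at roughly 75 characters total.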
hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs

@@ -15,55 +15,47 @@
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
function hadoop_usage
{
echo "Usage: hdfs [--config confdir] [--daemon (start|stop|status)]"
echo " [--loglevel loglevel] COMMAND"
echo " where COMMAND is one of:"
echo " balancer run a cluster balancing utility"
echo " cacheadmin configure the HDFS cache"
echo " classpath prints the class path needed to get the"
echo " Hadoop jar and the required libraries"
echo " crypto configure HDFS encryption zones"
echo " datanode run a DFS datanode"
echo " debug run a Debug Admin to execute HDFS debug commands"
echo " dfs run a filesystem command on the file system"
echo " dfsadmin run a DFS admin client"
echo " fetchdt fetch a delegation token from the NameNode"
echo " fsck run a DFS filesystem checking utility"
echo " getconf get config values from configuration"
echo " groups get the groups which users belong to"
echo " haadmin run a DFS HA admin client"
echo " jmxget get JMX exported values from NameNode or DataNode."
echo " journalnode run the DFS journalnode"
echo " lsSnapshottableDir list all snapshottable dirs owned by the current user"
echo " Use -help to see options"
echo " mover run a utility to move block replicas across"
echo " storage types"
echo " namenode run the DFS namenode"
echo " Use -format to initialize the DFS filesystem"
echo " nfs3 run an NFS version 3 gateway"
echo " oev apply the offline edits viewer to an edits file"
echo " oiv apply the offline fsimage viewer to an fsimage"
echo " oiv_legacy apply the offline fsimage viewer to a legacy fsimage"
echo " portmap run a portmap service"
echo " secondarynamenode run the DFS secondary namenode"
echo " snapshotDiff diff two snapshots of a directory or diff the"
echo " current directory contents with a snapshot"
echo " storagepolicies list/get/set block storage policies"
echo " version print the version"
echo " zkfc run the ZK Failover Controller daemon"
echo ""
echo "Most commands print help when invoked w/o parameters."
# There are also debug commands, but they don't show up in this listing.
hadoop_add_subcommand "balancer" "run a cluster balancing utility"
hadoop_add_subcommand "cacheadmin" "configure the HDFS cache"
hadoop_add_subcommand "classpath" "prints the class path needed to get the hadoop jar and the required libraries"
hadoop_add_subcommand "crypto" "configure HDFS encryption zones"
hadoop_add_subcommand "datanode" "run a DFS datanode"
hadoop_add_subcommand "debug" "run a Debug Admin to execute HDFS debug commands"
hadoop_add_subcommand "dfs" "run a filesystem command on the file system"
hadoop_add_subcommand "dfsadmin" "run a DFS admin client"
hadoop_add_subcommand "fetchdt" "fetch a delegation token from the NameNode"
hadoop_add_subcommand "fsck" "run a DFS filesystem checking utility"
hadoop_add_subcommand "getconf" "get config values from configuration"
hadoop_add_subcommand "groups" "get the groups which users belong to"
hadoop_add_subcommand "haadmin" "run a DFS HA admin client"
hadoop_add_subcommand "jmxget" "get JMX exported values from NameNode or DataNode."
hadoop_add_subcommand "journalnode" "run the DFS journalnode"
hadoop_add_subcommand "lsSnapshottableDir" "list all snapshottable dirs owned by the current user"
hadoop_add_subcommand "mover" "run a utility to move block replicas across storage types"
hadoop_add_subcommand "namenode" "run the DFS namenode"
hadoop_add_subcommand "nfs3" "run an NFS version 3 gateway"
hadoop_add_subcommand "oev" "apply the offline edits viewer to an edits file"
hadoop_add_subcommand "oiv" "apply the offline fsimage viewer to an fsimage"
hadoop_add_subcommand "oiv_legacy" "apply the offline fsimage viewer to a legacy fsimage"
hadoop_add_subcommand "portmap" "run a portmap service"
hadoop_add_subcommand "secondarynamenode" "run the DFS secondary namenode"
hadoop_add_subcommand "snapshotDiff" "diff two snapshots of a directory or diff the current directory contents with a snapshot"
hadoop_add_subcommand "storagepolicies" "list/get/set block storage policies"
hadoop_add_subcommand "version" "print the version"
hadoop_add_subcommand "zkfc" "run the ZK Failover Controller daemon"
hadoop_generate_usage "${MYNAME}"
}
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then
DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
else
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
@@ -107,11 +99,11 @@ case ${COMMAND} in
if [[ -n "${HADOOP_SECURE_DN_USER}" ]]; then
secure_service="true"
secure_user="${HADOOP_SECURE_DN_USER}"
# backward compatibility
HADOOP_SECURE_PID_DIR="${HADOOP_SECURE_PID_DIR:-$HADOOP_SECURE_DN_PID_DIR}"
HADOOP_SECURE_LOG_DIR="${HADOOP_SECURE_LOG_DIR:-$HADOOP_SECURE_DN_LOG_DIR}"
hadoop_debug "Appending HADOOP_DATANODE_OPTS onto HADOOP_OPTS"
hadoop_debug "Appending HADOOP_DN_SECURE_EXTRA_OPTS onto HADOOP_OPTS"
HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_DATANODE_OPTS} ${HADOOP_DN_SECURE_EXTRA_OPTS}"
@@ -186,11 +178,11 @@ case ${COMMAND} in
if [[ -n "${HADOOP_PRIVILEGED_NFS_USER}" ]]; then
secure_service="true"
secure_user="${HADOOP_PRIVILEGED_NFS_USER}"
# backward compatibility
HADOOP_SECURE_PID_DIR="${HADOOP_SECURE_PID_DIR:-$HADOOP_SECURE_NFS3_PID_DIR}"
HADOOP_SECURE_LOG_DIR="${HADOOP_SECURE_LOG_DIR:-$HADOOP_SECURE_NFS3_LOG_DIR}"
hadoop_debug "Appending HADOOP_NFS3_OPTS onto HADOOP_OPTS"
hadoop_debug "Appending HADOOP_NFS3_SECURE_EXTRA_OPTS onto HADOOP_OPTS"
HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_NFS3_OPTS} ${HADOOP_NFS3_SECURE_EXTRA_OPTS}"

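The backward-compatibility hunks above lean on Bash default expansion: the generic HADOOP_SECURE_PID_DIR/HADOOP_SECURE_LOG_DIR settings win when already set, and the old per-service variables are used only as fallbacks. A tiny illustration with made-up variable names:

# ${NEW:-$OLD} expands to NEW when it is set and non-empty, else OLD.
OLD_PID_DIR=/var/run/hadoop-legacy      # made-up path for illustration
unset NEW_PID_DIR
NEW_PID_DIR="${NEW_PID_DIR:-$OLD_PID_DIR}"
echo "${NEW_PID_DIR}"                   # prints /var/run/hadoop-legacy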
hadoop-mapreduce-project/bin/mapred

@@ -15,29 +15,24 @@
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
function hadoop_usage
{
echo "Usage: mapred [--config confdir] [--daemon (start|stop|status)]"
echo " [--loglevel loglevel] COMMAND"
echo " where COMMAND is one of:"
echo " archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
echo " classpath prints the class path needed for running"
echo " mapreduce subcommands"
echo " distcp <srcurl> <desturl> copy file or directories recursively"
echo " historyserver run job history servers as a standalone daemon"
echo " hsadmin job history server admin interface"
echo " job manipulate MapReduce jobs"
echo " pipes run a Pipes job"
echo " queue get information regarding JobQueues"
echo " sampler sampler"
echo " version print the version"
echo ""
echo "Most commands print help when invoked w/o parameters."
hadoop_add_subcommand "archive" "create a hadoop archive"
hadoop_add_subcommand "classpath" "prints the class path needed for running mapreduce subcommands"
hadoop_add_subcommand "distcp" "copy file or directories recursively"
hadoop_add_subcommand "historyserver" "run job history servers as a standalone daemon"
hadoop_add_subcommand "hsadmin" "job history server admin interface"
hadoop_add_subcommand "job" "manipulate MapReduce jobs"
hadoop_add_subcommand "pipes" "run a Pipes job"
hadoop_add_subcommand "queue" "get information regarding JobQueues"
hadoop_add_subcommand "sampler" "sampler"
hadoop_add_subcommand "version" "print the version"
hadoop_generate_usage "${MYNAME}"
}
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
# let's locate libexec...
if [[ -n "${HADOOP_PREFIX}" ]]; then

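One difference worth noting before the yarn file: hdfs and mapred call hadoop_generate_usage "${MYNAME}" with no second argument, while hadoop and yarn pass true. Inside hadoop_generate_usage, the [[ ${takesclass} = true ]] test therefore fails for hdfs/mapred and the CLASSNAME usage lines are skipped. A standalone demonstration of the comparison (not Hadoop code):

takesclass=""                # hdfs/mapred: second argument omitted
[[ ${takesclass} = true ]] && echo "would print the CLASSNAME lines"
takesclass=true              # hadoop/yarn pass true
[[ ${takesclass} = true ]] && echo "would print the CLASSNAME lines"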
hadoop-yarn-project/hadoop-yarn/bin/yarn (Normal file → Executable file)

@@ -15,37 +15,30 @@
# See the License for the specific language governing permissions and
# limitations under the License.
MYNAME="${BASH_SOURCE-$0}"
function hadoop_usage
{
echo "Usage: yarn [--config confdir] [COMMAND | CLASSNAME]"
echo " CLASSNAME run the class named CLASSNAME"
echo " or"
echo " where COMMAND is one of:"
echo " application prints application(s) report/kill application"
echo " applicationattempt prints applicationattempt(s) report"
echo " classpath prints the class path needed to get the"
echo " Hadoop jar and the required libraries"
echo " cluster prints cluster information"
echo " container prints container(s) report"
echo " daemonlog get/set the log level for each daemon"
echo " jar <jar> run a jar file"
echo " logs dump container logs"
echo " node prints node report(s)"
echo " nodemanager run a nodemanager on each slave"
echo " proxyserver run the web app proxy server"
echo " queue prints queue information"
echo " resourcemanager run the ResourceManager"
echo " Use -format-state-store for deleting the RMStateStore."
echo " Use -remove-application-from-state-store <appId> for "
echo " removing application from RMStateStore."
echo " rmadmin admin tools"
echo " scmadmin SharedCacheManager admin tools"
echo " sharedcachemanager run the SharedCacheManager daemon"
echo " timelineserver run the timeline server"
echo " top view cluster information"
echo " version print the version"
echo ""
echo "Most commands print help when invoked w/o parameters."
hadoop_add_subcommand "application" "prints application(s) report/kill application"
hadoop_add_subcommand "applicationattempt" "prints applicationattempt(s) report"
hadoop_add_subcommand "classpath" "prints the class path needed to get the hadoop jar and the required libraries"
hadoop_add_subcommand "cluster" "prints cluster information"
hadoop_add_subcommand "container" "prints container(s) report"
hadoop_add_subcommand "daemonlog" "get/set the log level for each daemon"
hadoop_add_subcommand "jar <jar>" "run a jar file"
hadoop_add_subcommand "logs" "dump container logs"
hadoop_add_subcommand "node" "prints node report(s)"
hadoop_add_subcommand "nodemanager" "run a nodemanager on each slave"
hadoop_add_subcommand "proxyserver" "run the web app proxy server"
hadoop_add_subcommand "queue" "prints queue information"
hadoop_add_subcommand "resourcemanager" "run the ResourceManager"
hadoop_add_subcommand "rmadmin" "admin tools"
hadoop_add_subcommand "scmadmin" "SharedCacheManager admin tools"
hadoop_add_subcommand "sharedcachemanager" "run the SharedCacheManager daemon"
hadoop_add_subcommand "timelineserver" "run the timeline server"
hadoop_add_subcommand "top" "view cluster information"
hadoop_add_subcommand "version" "print the version"
hadoop_generate_usage "${MYNAME}" true
}
@@ -53,8 +46,7 @@ function hadoop_usage
if [[ -n "${HADOOP_PREFIX}" ]]; then
DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
else
this="${BASH_SOURCE-$0}"
bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
fi
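
For reference, once these entries are registered, hadoop_generate_usage should produce output for the yarn script roughly like the sample below. Spacing, wrapping, and indentation are approximate, since the real layout is computed at runtime from the longest subcommand name.

Usage: yarn [OPTIONS] SUBCOMMAND [SUBCOMMAND OPTIONS]
 or    yarn [OPTIONS] CLASSNAME [CLASSNAME OPTIONS]
  where CLASSNAME is a user-provided Java class

  OPTIONS is none or any of:
  --config confdir
  --daemon (start|stop|status)
  --debug
  --hostnames list[,of,host,names]
  --hosts filename
  --loglevel loglevel
  --slaves

  SUBCOMMAND is one of:
application          prints application(s) report/kill application
applicationattempt   prints applicationattempt(s) report
classpath            prints the class path needed to get the hadoop jar and
                     the required libraries
[remaining subcommands elided]

Most subcommands print help when invoked w/o parameters or with -h.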