HADOOP-12857. rework hadoop-tools (aw)

Allen Wittenauer 2016-03-02 13:13:36 -08:00
parent 8f85e5d212
commit 738155063e
33 changed files with 651 additions and 132 deletions


@@ -0,0 +1,182 @@
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
VERSION=${1:-3.0.0-SNAPSHOT}
TARGETDIR=${2:-/tmp/target}
TOOLSDIR=${3:-/tmp/tools}
function getfilename
{
  declare module=$1
  declare modtype=$2

  if [[ ${modtype} = builtin ]]; then
    echo "${TARGETDIR}/hadoop-${VERSION}/libexec/tools/${module}.sh"
  else
    echo "${TARGETDIR}/hadoop-${VERSION}/libexec/shellprofile.d/${module}.sh"
  fi
}

function header
{
  declare fn=$1

  cat >>"${fn}" <<-'TOKEN'
#!/usr/bin/env bash
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
#
# IMPORTANT: This file is automatically generated by hadoop-dist at
# -Pdist time.
#
#
TOKEN
}

function optional_prologue
{
  declare fn=$1
  declare module=$2

  if [[ -z "${OPTMODS}" ]]; then
    OPTMODS=${module}
  else
    OPTMODS="${OPTMODS},${module}"
  fi

  {
    echo "if hadoop_verify_entry HADOOP_TOOLS_OPTIONS \"${module}\"; then"
    echo "  hadoop_add_profile \"${module}\""
    echo "fi"
    echo ""
    echo "function _${module}_hadoop_classpath"
    echo "{"
  } >> "${fn}"
}

function builtin_prologue
{
  declare fn=$1
  declare module=$2

  {
    echo ""
    echo "function hadoop_classpath_tools_${module}"
    echo "{"
  } >> "${fn}"
}

function dependencywork
{
  declare fn=$1
  declare module=$2
  declare depfn=$3

  declare depline
  declare jarname

  while read -r depline; do
    jarname=$(echo "${depline}" | awk -F: '{print $2"-"$4".jar"}')

    if [[ -f "${TARGETDIR}/hadoop-${VERSION}/share/hadoop/tools/lib/${jarname}" ]]; then
      {
        echo "  if [[ -f \"\${HADOOP_TOOLS_HOME}/\${HADOOP_TOOLS_LIB_JARS_DIR}/${jarname}\" ]]; then"
        echo "    hadoop_add_classpath \"\${HADOOP_TOOLS_HOME}/\${HADOOP_TOOLS_LIB_JARS_DIR}/${jarname}\""
        echo "  fi"
      } >> "${fn}"
    elif [[ -f "${TARGETDIR}/hadoop-${VERSION}/share/hadoop/common/${jarname}"
         || -f "${TARGETDIR}/hadoop-${VERSION}/share/hadoop/common/lib/${jarname}" ]]; then
      true
    else
      echo "ERROR: ${module} has missing dependencies: ${jarname}"
    fi
  done < <(grep compile "${depfn}")

  {
    echo "  hadoop_add_classpath \"\${HADOOP_TOOLS_HOME}/\${HADOOP_TOOLS_LIB_JARS_DIR}/${module}-${VERSION}.jar\""
    echo "}"
    echo ""
  } >> "${fn}"
}

function document_optionals
{
  echo "Rewriting ${TARGETDIR}/hadoop-${VERSION}/etc/hadoop/hadoop-env.sh"

  sed -e "s^@@@HADOOP_OPTIONAL_TOOLS@@@^${OPTMODS}^" \
    "${TARGETDIR}/hadoop-${VERSION}/etc/hadoop/hadoop-env.sh" \
    > "${TARGETDIR}/hadoop-${VERSION}/etc/hadoop/hadoop-env.sh.new"

  mv "${TARGETDIR}/hadoop-${VERSION}/etc/hadoop/hadoop-env.sh.new" \
    "${TARGETDIR}/hadoop-${VERSION}/etc/hadoop/hadoop-env.sh"
}

function process
{
  declare fn
  declare basefn
  declare modtype
  declare module
  declare newfile
  declare newdir

  while read -r fn; do
    basefn=${fn##*/}
    module=$(echo "${basefn}" | cut -f1 -d.)
    modtype=$(echo "${basefn}" | cut -f2 -d.)
    modtype=${modtype##tools-}

    newfile=$(getfilename "${module}" "${modtype}")
    newdir=$(dirname "${newfile}")
    mkdir -p "${newdir}"

    if [[ -f "${newfile}" ]]; then
      rm "${newfile}"
    fi
    touch "${newfile}"

    header "${newfile}" "${module}"

    "${modtype}_prologue" "${newfile}" "${module}"

    dependencywork "${newfile}" "${module}" "${fn}"

    chmod a+rx "${newfile}"
  done < <(find "${TOOLSDIR}" -name '*.tools-builtin.txt' -o -name '*.tools-optional.txt')

  document_optionals
}

process
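For orientation, here is roughly what one generated hook looks like. For a hypothetical optional module hadoop-aws whose dependency list pulled in one extra jar (both jar names below are illustrative, not taken from a real build), optional_prologue plus dependencywork above would emit a shellprofile along these lines:

    # sketch of libexec/shellprofile.d/hadoop-aws.sh (generated license header omitted)
    if hadoop_verify_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"; then
      hadoop_add_profile "hadoop-aws"
    fi

    # note: bash accepts the dash in the generated function name
    function _hadoop-aws_hadoop_classpath
    {
      if [[ -f "${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_LIB_JARS_DIR}/aws-java-sdk-1.10.6.jar" ]]; then
        hadoop_add_classpath "${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_LIB_JARS_DIR}/aws-java-sdk-1.10.6.jar"
      fi
      hadoop_add_classpath "${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_LIB_JARS_DIR}/hadoop-aws-3.0.0-SNAPSHOT.jar"
    }

Builtin modules get the same dependency treatment minus the profile registration: a libexec/tools/<module>.sh defining hadoop_classpath_tools_<module>.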


@@ -114,7 +114,7 @@ case ${COMMAND} in
     ;;
   archive)
     CLASS=org.apache.hadoop.tools.HadoopArchives
-    hadoop_add_to_classpath_toolspath
+    hadoop_add_to_classpath_tools hadoop-archives
   ;;
   checknative)
     CLASS=org.apache.hadoop.util.NativeLibraryChecker
@@ -133,11 +133,11 @@ case ${COMMAND} in
     ;;
   distch)
     CLASS=org.apache.hadoop.tools.DistCh
-    hadoop_add_to_classpath_toolspath
+    hadoop_add_to_classpath_tools hadoop-extras
   ;;
   distcp)
     CLASS=org.apache.hadoop.tools.DistCp
-    hadoop_add_to_classpath_toolspath
+    hadoop_add_to_classpath_tools hadoop-distcp
   ;;
   envvars)
     echo "JAVA_HOME='${JAVA_HOME}'"
@@ -146,7 +146,9 @@ case ${COMMAND} in
     echo "HADOOP_COMMON_LIB_JARS_DIR='${HADOOP_COMMON_LIB_JARS_DIR}'"
    echo "HADOOP_COMMON_LIB_NATIVE_DIR='${HADOOP_COMMON_LIB_NATIVE_DIR}'"
    echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
-    echo "HADOOP_TOOLS_PATH='${HADOOP_TOOLS_PATH}'"
+    echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+    echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+    echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
    exit 0
   ;;
   fs)


@@ -41,6 +41,44 @@ function hadoop_debug
   fi
 }
 
+## @description  Given variable $1 delete $2 from it
+## @audience     public
+## @stability    stable
+## @replaceable  no
+function hadoop_delete_entry
+{
+  if [[ ${!1} =~ \ ${2}\  ]] ; then
+    hadoop_debug "Removing ${2} from ${1}"
+    eval "${1}"=\""${!1// ${2} }"\"
+  fi
+}
+
+## @description  Given variable $1 add $2 to it
+## @audience     public
+## @stability    stable
+## @replaceable  no
+function hadoop_add_entry
+{
+  if [[ ! ${!1} =~ \ ${2}\  ]] ; then
+    hadoop_debug "Adding ${2} to ${1}"
+    #shellcheck disable=SC2140
+    eval "${1}"=\""${!1} ${2} "\"
+  fi
+}
+
+## @description  Given variable $1 determine if $2 is in it
+## @audience     public
+## @stability    stable
+## @replaceable  no
+## @return       0 = yes, 1 = no
+function hadoop_verify_entry
+{
+  # this unfortunately can't really be tested by bats. :(
+  # so if this changes, be aware that unit tests effectively
+  # do this function in them
+  [[ ${!1} =~ \ ${2}\  ]]
+}
+
 ## @description  Add a subcommand to the usage output
 ## @audience     private
 ## @stability    evolving
@@ -264,10 +302,9 @@ function hadoop_bootstrap
   YARN_LIB_JARS_DIR=${YARN_LIB_JARS_DIR:-"share/hadoop/yarn/lib"}
   MAPRED_DIR=${MAPRED_DIR:-"share/hadoop/mapreduce"}
   MAPRED_LIB_JARS_DIR=${MAPRED_LIB_JARS_DIR:-"share/hadoop/mapreduce/lib"}
-
-  # setup a default HADOOP_TOOLS_PATH
-  hadoop_deprecate_envvar TOOL_PATH HADOOP_TOOLS_PATH
-  HADOOP_TOOLS_PATH=${HADOOP_TOOLS_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}
+  HADOOP_TOOLS_HOME=${HADOOP_TOOLS_HOME:-${HADOOP_PREFIX}}
+  HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
+  HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}
 
   # usage output set to zero
   hadoop_reset_usage
@@ -322,6 +359,7 @@ function hadoop_exec_hadoopenv
   if [[ -z "${HADOOP_ENV_PROCESSED}" ]]; then
     if [[ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]]; then
       export HADOOP_ENV_PROCESSED=true
+      # shellcheck disable=SC1090
       . "${HADOOP_CONF_DIR}/hadoop-env.sh"
     fi
   fi
@@ -334,6 +372,7 @@ function hadoop_exec_hadoopenv
 function hadoop_exec_userfuncs
 {
   if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
+    # shellcheck disable=SC1090
     . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
   fi
 }
@@ -348,6 +387,7 @@ function hadoop_exec_hadooprc
 {
   if [[ -f "${HOME}/.hadooprc" ]]; then
     hadoop_debug "Applying the user's .hadooprc"
+    # shellcheck disable=SC1090
     . "${HOME}/.hadooprc"
   fi
 }
@@ -373,11 +413,22 @@ function hadoop_import_shellprofiles
     files2=(${HADOOP_CONF_DIR}/shellprofile.d/*.sh)
   fi
 
+  # enable bundled shellprofiles that come
+  # from hadoop-tools.  This converts the user-facing HADOOP_OPTIONAL_TOOLS
+  # to the HADOOP_TOOLS_OPTIONS that the shell profiles expect.
+  # See dist-tools-hooks-maker for how the example HADOOP_OPTIONAL_TOOLS
+  # gets populated into hadoop-env.sh
+  for i in ${HADOOP_OPTIONAL_TOOLS//,/ }; do
+    hadoop_add_entry HADOOP_TOOLS_OPTIONS "${i}"
+  done
+
   for i in "${files1[@]}" "${files2[@]}"
   do
     if [[ -n "${i}"
       && -f "${i}" ]]; then
       hadoop_debug "Profiles: importing ${i}"
+      # shellcheck disable=SC1090
       . "${i}"
     fi
   done
@@ -945,34 +996,25 @@ function hadoop_add_common_to_classpath
   hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
 }
 
-## @description  Add the HADOOP_TOOLS_PATH to the classpath
+## @description  Run libexec/tools/module.sh to add to the classpath
 ## @description  environment
 ## @audience     private
 ## @stability    evolving
 ## @replaceable  yes
-function hadoop_add_to_classpath_toolspath
+## @param        module
+function hadoop_add_to_classpath_tools
 {
-  declare -a array
-  declare -i c=0
-  declare -i j
-  declare -i i
-  declare idx
+  declare module=$1
 
-  if [[ -n "${HADOOP_TOOLS_PATH}" ]]; then
-    hadoop_debug "Adding HADOOP_TOOLS_PATH to CLASSPATH"
-    oldifs=${IFS}
-    IFS=:
-    for idx in ${HADOOP_TOOLS_PATH}; do
-      array[${c}]=${idx}
-      ((c=c+1))
-    done
-    IFS=${oldifs}
-    ((j=c-1)) || ${QATESTMODE}
-    for ((i=0; i<=j; i++)); do
-      hadoop_add_classpath "${array[$i]}" after
-    done
+  if [[ -f "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh" ]]; then
+    # shellcheck disable=SC1090
+    . "${HADOOP_LIBEXEC_DIR}/tools/${module}.sh"
+  else
+    hadoop_error "ERROR: Tools helper ${HADOOP_LIBEXEC_DIR}/tools/${module}.sh was not found."
+  fi
+
+  if declare -f hadoop_classpath_tools_${module} >/dev/null 2>&1; then
+    "hadoop_classpath_tools_${module}"
   fi
 }
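Taken together, a minimal sketch of how these new primitives behave (variable contents and module names here are examples, not output from a real run):

    HADOOP_TOOLS_OPTIONS=""
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"     # value becomes " hadoop-aws "
    hadoop_add_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"     # no-op: already present
    hadoop_verify_entry HADOOP_TOOLS_OPTIONS "hadoop-aws" && echo enabled
    hadoop_delete_entry HADOOP_TOOLS_OPTIONS "hadoop-aws"  # value back to ""

    # and the new classpath hook, as bin/hadoop and bin/mapred now call it:
    hadoop_add_to_classpath_tools hadoop-distcp   # sources libexec/tools/hadoop-distcp.sh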


@@ -1,3 +1,4 @@
+#!/usr/bin/env bash
 # Copyright 2014 The Apache Software Foundation
 #
 # Licensed to the Apache Software Foundation (ASF) under one
@@ -87,7 +88,10 @@
 # Misc paths
 ####
 
-# setup a default HADOOP_TOOLS_PATH, where things like distcp lives
-# note that this path only gets added for certain commands and not
-# part of the general classpath
-# export HADOOP_TOOLS_PATH="${HADOOP_PREFIX}/share/hadoop/tools/lib/*"
+# This is where things like distcp, S3, and other things live
+# note that this path only gets added for certain commands and not
+# part of the general classpath unless HADOOP_OPTIONAL_TOOLS is used
+# to configure them in
+# export HADOOP_TOOLS_HOME=${HADOOP_PREFIX}
+# export HADOOP_TOOLS_DIR=${HADOOP_TOOLS_DIR:-"share/hadoop/tools"}
+# export HADOOP_TOOLS_LIB_JARS_DIR=${HADOOP_TOOLS_LIB_JARS_DIR:-"${HADOOP_TOOLS_DIR}/lib"}


@@ -49,7 +49,7 @@
 # preferred. Many sites configure these options outside of Hadoop,
 # such as in /etc/profile.d
 
 # The java implementation to use. By default, this environment
 # variable is REQUIRED on ALL platforms except OS X!
 # export JAVA_HOME=
@@ -64,15 +64,15 @@
 # path.
 # export HADOOP_CONF_DIR=$HADOOP_PREFIX/etc/hadoop
 
 # The maximum amount of heap to use (Java -Xmx). If no unit
 # is provided, it will be converted to MB. Daemons will
 # prefer any Xmx setting in their respective _OPT variable.
 # There is no default; the JVM will autoscale based upon machine
 # memory size.
 # export HADOOP_HEAPSIZE_MAX=
 
 # The minimum amount of heap to use (Java -Xms). If no unit
 # is provided, it will be converted to MB. Daemons will
 # prefer any Xms setting in their respective _OPT variable.
 # There is no default; the JVM will autoscale based upon machine
 # memory size.
@@ -107,8 +107,8 @@ case ${HADOOP_OS_TYPE} in
 esac
 
 # Extra Java runtime options for some Hadoop commands
 # and clients (i.e., hdfs dfs -blah). These get appended to HADOOP_OPTS for
 # such commands. In most cases, this should be left empty and
 # let users supply it on the command line.
 # export HADOOP_CLIENT_OPTS=""
@@ -146,6 +146,11 @@ esac
 # names starting with a '-' are treated as negative matches. For example,
 # export HADOOP_CLIENT_CLASSLOADER_SYSTEM_CLASSES="-org.apache.hadoop.UserClass,java.,javax.,org.apache.hadoop."
 
+# Enable optional, bundled Hadoop features
+# This is a comma delimited list.  It may NOT be overridden via .hadooprc
+# Entries may be added/removed as needed.
+# export HADOOP_OPTIONAL_TOOLS="@@@HADOOP_OPTIONAL_TOOLS@@@"
+
 ###
 # Options for remote shell connectivity
 ###
@@ -181,7 +186,7 @@ esac
 # non-secure)
 #
 
 # Where (primarily) daemon log files are stored.
 # $HADOOP_PREFIX/logs by default.
 # Java property: hadoop.log.dir
 # export HADOOP_LOG_DIR=${HADOOP_PREFIX}/logs
@@ -201,7 +206,7 @@ esac
 # Java property: hadoop.root.logger
 # export HADOOP_ROOT_LOGGER=INFO,console
 
 # Default log4j setting for daemons spawned explicitly by
 # --daemon option of hadoop, hdfs, mapred and yarn command.
 # Java property: hadoop.root.logger
 # export HADOOP_DAEMON_ROOT_LOGGER=INFO,RFA
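At -Pdist time the @@@HADOOP_OPTIONAL_TOOLS@@@ placeholder above is rewritten by dist-tools-hooks-maker's document_optionals with whatever optional modules it found, so a built tarball might ship with a line like the following (the exact module list depends on the build; this one is an example):

    # export HADOOP_OPTIONAL_TOOLS="hadoop-aws,hadoop-azure,hadoop-openstack"

A site wanting, say, S3A support on the default classpath would then uncomment and trim it:

    export HADOOP_OPTIONAL_TOOLS="hadoop-aws"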


@@ -0,0 +1,47 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

load hadoop-functions_test_helper

toolsetup () {
  HADOOP_LIBEXEC_DIR="${TMP}/libexec"
  mkdir -p "${HADOOP_LIBEXEC_DIR}/tools"
}

@test "hadoop_classpath_tools (load)" {
  toolsetup
  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/tools/test.sh"
  hadoop_add_to_classpath_tools test
  [ -n "${unittest}" ]
}

@test "hadoop_classpath_tools (not exist)" {
  toolsetup
  hadoop_add_to_classpath_tools test
  [ -z "${unittest}" ]
}

@test "hadoop_classpath_tools (function)" {
  toolsetup
  {
    echo "function hadoop_classpath_tools_test {"
    echo "  unittest=libexec"
    echo "  }"
  } > "${HADOOP_LIBEXEC_DIR}/tools/test.sh"
  hadoop_add_to_classpath_tools test
  declare -f
  [ -n "${unittest}" ]
}
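Assuming the bats test runner is installed, these can be run directly from the shell test directory (path per the hadoop-common test tree):

    cd hadoop-common-project/hadoop-common/src/test/scripts
    bats hadoop_add_to_classpath_tools.bats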


@ -1,74 +0,0 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
load hadoop-functions_test_helper
freetheclasses () {
local j
for j in HADOOP_TOOLS_PATH \
CLASSPATH; do
unset ${j}
done
}
createdirs () {
local j
for j in new old foo bar baz; do
mkdir -p "${TMP}/${j}"
done
}
@test "hadoop_add_to_classpath_toolspath (nothing)" {
freetheclasses
hadoop_add_to_classpath_toolspath
[ -z "${CLASSPATH}" ]
}
@test "hadoop_add_to_classpath_toolspath (none)" {
freetheclasses
CLASSPATH=test
hadoop_add_to_classpath_toolspath
[ "${CLASSPATH}" = "test" ]
}
@test "hadoop_add_to_classpath_toolspath (only)" {
freetheclasses
createdirs
HADOOP_TOOLS_PATH="${TMP}/new"
hadoop_add_to_classpath_toolspath
[ "${CLASSPATH}" = "${TMP}/new" ]
}
@test "hadoop_add_to_classpath_toolspath (1+1)" {
freetheclasses
createdirs
CLASSPATH=${TMP}/foo
HADOOP_TOOLS_PATH=${TMP}/foo
hadoop_add_to_classpath_toolspath
echo ">${CLASSPATH}<"
[ ${CLASSPATH} = "${TMP}/foo" ]
}
@test "hadoop_add_to_classpath_toolspath (3+2)" {
freetheclasses
createdirs
CLASSPATH=${TMP}/foo:${TMP}/bar:${TMP}/baz
HADOOP_TOOLS_PATH=${TMP}/new:${TMP}/old
hadoop_add_to_classpath_toolspath
echo ">${CLASSPATH}<"
[ ${CLASSPATH} = "${TMP}/foo:${TMP}/bar:${TMP}/baz:${TMP}/new:${TMP}/old" ]
}


@@ -38,7 +38,8 @@ basicinitsetup () {
   dirvars="HADOOP_COMMON_HOME \
     HADOOP_MAPRED_HOME \
     HADOOP_HDFS_HOME \
-    HADOOP_YARN_HOME"
+    HADOOP_YARN_HOME \
+    HADOOP_TOOLS_HOME"
 
   for j in ${testvars}; do
     unset ${j}


@@ -15,13 +15,13 @@
 load hadoop-functions_test_helper
 
-@test "hadoop_deprecate_envvar (no libexec)" {
+@test "hadoop_bootstrap (no libexec)" {
   unset HADOOP_LIBEXEC_DIR
   run hadoop_bootstrap
   [ "${status}" -eq 1 ]
 }
 
-@test "hadoop_deprecate_envvar (libexec)" {
+@test "hadoop_bootstrap (libexec)" {
   unset HADOOP_PREFIX
   unset HADOOP_COMMON_DIR
   unset HADOOP_COMMON_LIB_JARS_DIR
@@ -31,7 +31,9 @@ load hadoop-functions_test_helper
   unset YARN_LIB_JARS_DIR
   unset MAPRED_DIR
   unset MAPRED_LIB_JARS_DIR
-  unset TOOL_PATH
+  unset HADOOP_TOOLS_HOME
+  unset HADOOP_TOOLS_DIR
+  unset HADOOP_TOOLS_LIB_JARS_DIR
   unset HADOOP_OS_TYPE
 
   hadoop_bootstrap
@@ -46,6 +48,9 @@ load hadoop-functions_test_helper
   [ -n ${YARN_LIB_JARS_DIR} ]
   [ -n ${MAPRED_DIR} ]
   [ -n ${MAPRED_LIB_JARS_DIR} ]
-  [ -n ${TOOL_PATH} ]
   [ -n ${HADOOP_OS_TYPE} ]
+  [ -n ${HADOOP_TOOLS_PATH} ]
+  [ -n ${HADOOP_TOOLS_HOME} ]
+  [ -n ${HADOOP_TOOLS_DIR} ]
+  [ -n ${HADOOP_TOOLS_LIB_JARS_DIR} ]
 }


@@ -0,0 +1,49 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

load hadoop-functions_test_helper

@test "hadoop_add_entry (positive 1)" {
  hadoop_add_entry testvar foo
  echo ">${testvar}<"
  [ "${testvar}" = " foo " ]
}

@test "hadoop_add_entry (negative)" {
  hadoop_add_entry testvar foo
  hadoop_add_entry testvar foo
  echo ">${testvar}<"
  [ "${testvar}" = " foo " ]
}

@test "hadoop_add_entry (positive 2)" {
  hadoop_add_entry testvar foo
  hadoop_add_entry testvar foo
  hadoop_add_entry testvar bar
  echo ">${testvar}<"
  [ "${testvar}" = " foo bar " ]
}

@test "hadoop_add_entry (positive 3)" {
  hadoop_add_entry testvar foo
  hadoop_add_entry testvar foo
  hadoop_add_entry testvar bar
  hadoop_add_entry testvar bar
  hadoop_add_entry testvar baz
  hadoop_add_entry testvar baz
  echo ">${testvar}<"
  [ "${testvar}" = " foo bar baz " ]
}


@@ -66,6 +66,13 @@ _test_hadoop_finalize () {
   [ -z "${unittest}" ]
 }
 
+@test "hadoop_import_shellprofiles (H_O_T)" {
+  HADOOP_OPTIONAL_TOOLS=1,2
+  shellprofilesetup
+  hadoop_import_shellprofiles
+  [ "${HADOOP_TOOLS_OPTIONS}" == " 1 2 " ]
+}
+
 @test "hadoop_add_profile+hadoop_shellprofiles_init" {
   hadoop_add_profile test
   hadoop_shellprofiles_init


@@ -103,6 +103,24 @@
             </arguments>
           </configuration>
         </execution>
+        <execution>
+          <id>toolshooks</id>
+          <phase>prepare-package</phase>
+          <goals>
+            <goal>exec</goal>
+          </goals>
+          <configuration>
+            <executable>${shell-executable}</executable>
+            <workingDirectory>${basedir}</workingDirectory>
+            <requiresOnline>false</requiresOnline>
+            <arguments>
+              <argument>${basedir}/../dev-support/bin/dist-tools-hooks-maker</argument>
+              <argument>${project.version}</argument>
+              <argument>${project.build.directory}</argument>
+              <argument>${basedir}/../hadoop-tools</argument>
+            </arguments>
+          </configuration>
+        </execution>
         <execution>
           <id>tar</id>
           <phase>package</phase>
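The three <argument> values above line up with dist-tools-hooks-maker's positional parameters (VERSION, TARGETDIR, TOOLSDIR), so the hook-generation step can be reproduced by hand from a source checkout, e.g.:

    bash dev-support/bin/dist-tools-hooks-maker 3.0.0-SNAPSHOT \
      hadoop-dist/target hadoop-tools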


@@ -141,7 +141,9 @@ case ${COMMAND} in
       echo "HDFS_DIR='${HDFS_DIR}'"
       echo "HDFS_LIB_JARS_DIR='${HDFS_LIB_JARS_DIR}'"
       echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
-      echo "HADOOP_TOOLS_PATH='${HADOOP_TOOLS_PATH}'"
+      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
       exit 0
   ;;
   erasurecode)
@@ -165,7 +167,6 @@ case ${COMMAND} in
   ;;
   haadmin)
     CLASS=org.apache.hadoop.hdfs.tools.DFSHAAdmin
-    hadoop_add_to_classpath_toolspath
     hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;


@@ -69,13 +69,13 @@ case ${COMMAND} in
   ;;
   archive)
     CLASS=org.apache.hadoop.tools.HadoopArchives
-    hadoop_add_to_classpath_toolspath
+    hadoop_add_to_classpath_tools hadoop-archives
     hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
   archive-logs)
     CLASS=org.apache.hadoop.tools.HadoopArchiveLogs
-    hadoop_add_to_classpath_toolspath
+    hadoop_add_to_classpath_tools hadoop-archive-logs
     hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
@@ -84,7 +84,7 @@ case ${COMMAND} in
   ;;
   distcp)
     CLASS=org.apache.hadoop.tools.DistCp
-    hadoop_add_to_classpath_toolspath
+    hadoop_add_to_classpath_tools hadoop-distcp
     hadoop_debug "Appending HADOOP_CLIENT_OPTS onto HADOOP_OPTS"
     HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
   ;;
@@ -94,7 +94,9 @@ case ${COMMAND} in
       echo "MAPRED_DIR='${MAPRED_DIR}'"
       echo "MAPRED_LIB_JARS_DIR='${MAPRED_LIB_JARS_DIR}'"
       echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
-      echo "HADOOP_TOOLS_PATH='${HADOOP_TOOLS_PATH}'"
+      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
       exit 0
   ;;
   historyserver)


@@ -172,6 +172,23 @@
           </archive>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- referenced by a built-in command -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-builtin.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
       <plugin>
         <groupId>org.codehaus.mojo</groupId>
         <artifactId>findbugs-maven-plugin</artifactId>
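The output file written here is what dist-tools-hooks-maker later greps for "compile" entries. A typical mvn dependency:list line, and the jar name the script's awk expression derives from it, would look roughly like this (the artifact is illustrative):

    # a sample line from <module>.tools-builtin.txt:
    #    org.apache.hadoop:hadoop-common:jar:3.0.0-SNAPSHOT:compile
    echo "   org.apache.hadoop:hadoop-common:jar:3.0.0-SNAPSHOT:compile" \
      | awk -F: '{print $2"-"$4".jar"}'   # prints: hadoop-common-3.0.0-SNAPSHOT.jar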


@@ -128,6 +128,23 @@
           </archive>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- referenced by a built-in command -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-builtin.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>


@@ -90,6 +90,23 @@
           <forkedProcessTimeoutInSeconds>3600</forkedProcessTimeoutInSeconds>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- build a shellprofile -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-optional.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>


@@ -19,6 +19,9 @@ JAR file, `hadoop-aws.jar` also declares a transitive dependency on all
 external artifacts which are needed for this support —enabling downstream
 applications to easily use this support.
 
+To make it part of Apache Hadoop's default classpath, simply make sure that
+HADOOP_OPTIONAL_TOOLS in hadoop-env.sh has 'hadoop-aws' in the list.
+
 Features
 
 1. The "classic" `s3:` filesystem for storing objects in Amazon S3 Storage
@@ -30,7 +33,7 @@ higher performance.
 
 The specifics of using these filesystems are documented below.
 
-## Warning: Object Stores are not filesystems.
+## Warning #1: Object Stores are not filesystems.
 
 Amazon S3 is an example of "an object store". In order to achieve scalability
 and especially high availability, S3 has —as many other cloud object stores have
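Regarding the HADOOP_OPTIONAL_TOOLS note added above, a minimal sketch of what that looks like in practice (the grep is just a quick sanity check):

    # etc/hadoop/hadoop-env.sh
    export HADOOP_OPTIONAL_TOOLS="hadoop-aws"

    # verify the aws tool jars now land on the default classpath
    hadoop classpath | tr ':' '\n' | grep -i aws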


@@ -82,6 +82,24 @@
         </executions>
       </plugin>
 
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- build a shellprofile -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-optional.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>


@@ -34,6 +34,9 @@ The built jar file, named hadoop-azure.jar, also declares transitive dependencies
 on the additional artifacts it requires, notably the
 [Azure Storage SDK for Java](https://github.com/Azure/azure-storage-java).
 
+To make it part of Apache Hadoop's default classpath, simply make sure that
+HADOOP_OPTIONAL_TOOLS in hadoop-env.sh has 'hadoop-azure' in the list.
+
 ## <a name="Features" />Features
 
 * Read and write data stored in an Azure Blob Storage account.
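After enabling a module this way, the `hadoop envvars` command (extended by this commit) shows where the tools jars are expected to live; sample output with made-up install paths:

    $ hadoop envvars
    ...
    HADOOP_TOOLS_HOME='/opt/hadoop-3.0.0-SNAPSHOT'
    HADOOP_TOOLS_DIR='share/hadoop/tools'
    HADOOP_TOOLS_LIB_JARS_DIR='share/hadoop/tools/lib'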


@@ -132,6 +132,22 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-jar-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <outputFile>${project.basedir}/../../hadoop-dist/target/hadoop-tools-deps/${project.artifactId}.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>


@@ -163,6 +163,17 @@
             <outputDirectory>${project.build.directory}/lib</outputDirectory>
           </configuration>
         </execution>
+        <execution>
+          <id>deplist</id>
+          <phase>compile</phase>
+          <goals>
+            <goal>list</goal>
+          </goals>
+          <configuration>
+            <!-- referenced by a built-in command -->
+            <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-builtin.txt</outputFile>
+          </configuration>
+        </execution>
       </executions>
     </plugin>
     <plugin>


@@ -137,6 +137,23 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-jar-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- referenced by a built-in command -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-builtin.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>


@@ -163,6 +163,23 @@
           </archive>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- referenced by a built-in command -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-builtin.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>


@@ -88,6 +88,23 @@
           <forkedProcessTimeoutInSeconds>3600</forkedProcessTimeoutInSeconds>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- build a shellprofile -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-optional.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>

@@ -85,6 +85,23 @@
           <dependencyLocationsEnabled>false</dependencyLocationsEnabled>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- build a shellprofile -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-optional.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>


@@ -54,6 +54,9 @@ Introduction
 This module enables Apache Hadoop applications -including MapReduce jobs, read and write data to and from instances of the [OpenStack Swift object store](http://www.openstack.org/software/openstack-storage/).
 
+To make it part of Apache Hadoop's default classpath, simply make sure that
+HADOOP_OPTIONAL_TOOLS in hadoop-env.sh has 'hadoop-openstack' in the list.
+
 Features
 --------
@@ -440,7 +443,9 @@ If the host is declared, the proxy port must be set to a valid integer value.
 
 The `hadoop-openstack` JAR -or any dependencies- may not be on your classpath.
 
-If it is a remote MapReduce job that is failing, make sure that the JAR is
-installed on the servers in the cluster -or that the job submission process
-uploads the JAR file to the distributed cache.
+Make sure that the:
+* JAR is installed on the servers in the cluster.
+* 'hadoop-openstack' is on the HADOOP_OPTIONAL_TOOLS entry in hadoop-env.sh or that the job submission process uploads the JAR file to the distributed cache.
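As a sketch of those two remedies under the new layout (the version and paths below are examples):

    # globally, in etc/hadoop/hadoop-env.sh:
    export HADOOP_OPTIONAL_TOOLS="hadoop-openstack"

    # or per-job, for drivers that go through ToolRunner/GenericOptionsParser:
    hadoop jar myjob.jar MyDriver \
      -libjars "${HADOOP_TOOLS_HOME}/${HADOOP_TOOLS_LIB_JARS_DIR}/hadoop-openstack-3.0.0-SNAPSHOT.jar" ...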
 #### Failure to Authenticate


@@ -132,6 +132,23 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-jar-plugin</artifactId>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- referenced by a built-in command -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-builtin.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
   </build>
 </project>


@@ -177,6 +177,23 @@
         </execution>
       </executions>
     </plugin>
+    <plugin>
+      <groupId>org.apache.maven.plugins</groupId>
+      <artifactId>maven-dependency-plugin</artifactId>
+      <executions>
+        <execution>
+          <id>deplist</id>
+          <phase>compile</phase>
+          <goals>
+            <goal>list</goal>
+          </goals>
+          <configuration>
+            <!-- referenced by a built-in command -->
+            <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-builtin.txt</outputFile>
+          </configuration>
+        </execution>
+      </executions>
+    </plugin>
   </plugins>
 </build>
 </profile>


@@ -55,7 +55,7 @@ function parse_args()
 function calculate_classpath()
 {
-  hadoop_add_to_classpath_toolspath
+  hadoop_add_to_classpath_tools hadoop-rumen
 }
 
 function run_sls_generator()


@@ -70,7 +70,7 @@ function parse_args()
 function calculate_classpath
 {
-  hadoop_add_to_classpath_toolspath
+  hadoop_add_to_classpath_tools hadoop-sls
   hadoop_debug "Injecting ${HADOOP_PREFIX}/share/hadoop/tools/sls/html into CLASSPATH"
   hadoop_add_classpath "${HADOOP_PREFIX}/share/hadoop/tools/sls/html"
 }


@@ -174,6 +174,23 @@
           </archive>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>deplist</id>
+            <phase>compile</phase>
+            <goals>
+              <goal>list</goal>
+            </goals>
+            <configuration>
+              <!-- referenced by a built-in command -->
+              <outputFile>${project.basedir}/target/hadoop-tools-deps/${project.artifactId}.tools-builtin.txt</outputFile>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
     </plugins>
     <testResources>
       <testResource>


@@ -103,7 +103,9 @@ case "${COMMAND}" in
       echo "YARN_DIR='${YARN_DIR}'"
       echo "YARN_LIB_JARS_DIR='${YARN_LIB_JARS_DIR}'"
       echo "HADOOP_CONF_DIR='${HADOOP_CONF_DIR}'"
-      echo "HADOOP_TOOLS_PATH='${HADOOP_TOOLS_PATH}'"
+      echo "HADOOP_TOOLS_HOME='${HADOOP_TOOLS_HOME}'"
+      echo "HADOOP_TOOLS_DIR='${HADOOP_TOOLS_DIR}'"
+      echo "HADOOP_TOOLS_LIB_JARS_DIR='${HADOOP_TOOLS_LIB_JARS_DIR}'"
       exit 0
   ;;
   jar)