HADOOP-11022. User replaced functions get lost 2-3 levels deep (e.g., sbin) (aw)
commit 7971c97ec1
parent 3e85f5b605
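In short: before this change, the stock shell function library could be sourced again after a user's replacement functions once the entry point was two or three scripts deep (the sbin wrappers of the title), so the stock definitions silently won. A toy bash illustration of that failure mode (illustrative only, not Hadoop code):

function hadoop_rotate_log { echo "stock rotation"; }   # stock library loads
function hadoop_rotate_log { echo "user rotation"; }    # user replaces it
function hadoop_rotate_log { echo "stock rotation"; }   # library re-sourced a level deeper
hadoop_rotate_log                                       # prints: stock rotation, override lost

The hunks below address this two ways: user functions move into a dedicated file sourced after the stock library, and the per-project env scripts gain exported sentinels so each env file runs at most once.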
@@ -327,6 +327,9 @@ Trunk (Unreleased)
 
     HADOOP-11055. non-daemon pid files are missing (aw)
 
+    HADOOP-11022. User replaced functions get lost 2-3 levels deep (e.g.,
+    sbin) (aw)
+
   OPTIMIZATIONS
 
     HADOOP-7761. Improve the performance of raw comparisons. (todd)
@@ -156,6 +156,7 @@ done
 
 hadoop_find_confdir
 hadoop_exec_hadoopenv
+hadoop_exec_userfuncs
 
 #
 # IMPORTANT! User provided code is now available!
@@ -104,6 +104,15 @@ function hadoop_exec_hadoopenv
   fi
 }
 
+function hadoop_exec_userfuncs
+{
+  # NOTE: This function is not user replaceable.
+
+  if [[ -e "${HADOOP_CONF_DIR}/hadoop-user-functions.sh" ]]; then
+    . "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
+  fi
+}
+
 function hadoop_basic_init
 {
   # Some of these are also set in hadoop-env.sh.
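Because hadoop_exec_userfuncs runs after hadoop-functions.sh has already defined the stock versions, a function redefined in hadoop-user-functions.sh shadows its stock counterpart for the rest of the run. A minimal override sketch (the date-stamp policy is my own illustration, not from this commit):

# ${HADOOP_CONF_DIR}/hadoop-user-functions.sh
# sourced last, so this definition wins over the stock hadoop_rotate_log
function hadoop_rotate_log
{
  local log=$1

  # hypothetical policy: date-stamp rotated .out files instead of numbering them
  if [[ -f "${log}" ]]; then
    mv "${log}" "${log}.$(date +%Y%m%d%H%M%S)"
  fi
}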
@@ -337,84 +337,3 @@ esac
 # via this special env var:
 # HADOOP_ENABLE_BUILD_PATHS="true"
 
-# You can do things like replace parts of the shell underbelly.
-# Most of this code is in hadoop-functions.sh.
-#
-#
-# For example, if you want to add compression to the rotation
-# method for the .out files that daemons generate, you can do
-# that by redefining the hadoop_rotate_log function by
-# uncommenting this code block:
-
-#function hadoop_rotate_log
-#{
-#  #
-#  # log rotation (mainly used for .out files)
-#  # Users are likely to replace this one for something
-#  # that gzips or uses dates or who knows what.
-#  #
-#  # be aware that &1 and &2 might go through here
-#  # so don't do anything too crazy...
-#  #
-#  local log=$1;
-#  local num=${2:-5};
-#
-#  if [[ -f "${log}" ]]; then # rotate logs
-#    while [[ ${num} -gt 1 ]]; do
-#      #shellcheck disable=SC2086
-#      let prev=${num}-1
-#      if [[ -f "${log}.${prev}" ]]; then
-#        mv "${log}.${prev}" "${log}.${num}"
-#      fi
-#      num=${prev}
-#    done
-#    mv "${log}" "${log}.${num}"
-#    gzip -9 "${log}.${num}"
-#  fi
-#}
-#
-#
-# Another example: finding java
-#
-# By default, Hadoop assumes that $JAVA_HOME is always defined
-# outside of its configuration. Eons ago, Apple standardized
-# on a helper program called java_home to find it for you.
-#
-#function hadoop_java_setup
-#{
-#
-#  if [[ -z "${JAVA_HOME}" ]]; then
-#    case $HADOOP_OS_TYPE in
-#      Darwin*)
-#        JAVA_HOME=$(/usr/libexec/java_home)
-#      ;;
-#    esac
-#  fi
-#
-#  # Bail if we did not detect it
-#  if [[ -z "${JAVA_HOME}" ]]; then
-#    echo "ERROR: JAVA_HOME is not set and could not be found." 1>&2
-#    exit 1
-#  fi
-#
-#  if [[ ! -d "${JAVA_HOME}" ]]; then
-#    echo "ERROR: JAVA_HOME (${JAVA_HOME}) does not exist." 1>&2
-#    exit 1
-#  fi
-#
-#  JAVA="${JAVA_HOME}/bin/java"
-#
-#  if [[ ! -x ${JAVA} ]]; then
-#    echo "ERROR: ${JAVA} is not executable." 1>&2
-#    exit 1
-#  fi
-#  JAVA_HEAP_MAX=-Xmx1g
-#  HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-128}
-#
-#  # check envvars which might override default args
-#  if [[ -n "$HADOOP_HEAPSIZE" ]]; then
-#    JAVA_HEAP_MAX="-Xmx${HADOOP_HEAPSIZE}m"
-#  fi
-#}
-
-
@@ -0,0 +1,94 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements.  See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership.  The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#######
+# Advanced Users Only
+######
+
+# You can do things like replace parts of the shell underbelly.
+# Most of this code is in hadoop-functions.sh.
+#
+#
+# For example, if you want to add compression to the rotation
+# method for the .out files that daemons generate, you can do
+# that by redefining the hadoop_rotate_log function by
+# uncommenting this code block:
+
+#function hadoop_rotate_log
+#{
+#  local log=$1;
+#  local num=${2:-5};
+#
+#  if [[ -f "${log}" ]]; then
+#    while [[ ${num} -gt 1 ]]; do
+#      #shellcheck disable=SC2086
+#      let prev=${num}-1
+#      if [[ -f "${log}.${prev}.gz" ]]; then
+#        mv "${log}.${prev}.gz" "${log}.${num}.gz"
+#      fi
+#      num=${prev}
+#    done
+#    mv "${log}" "${log}.${num}"
+#    gzip -9 "${log}.${num}"
+#  fi
+#}
+#
+#
+
+#
+# Another example: finding java
+#
+# By default, Hadoop assumes that $JAVA_HOME is always defined
+# outside of its configuration. Eons ago, Apple standardized
+# on a helper program called java_home to find it for you.
+#
+#function hadoop_java_setup
+#{
+#
+#  if [[ -z "${JAVA_HOME}" ]]; then
+#    case $HADOOP_OS_TYPE in
+#      Darwin*)
+#        JAVA_HOME=$(/usr/libexec/java_home)
+#      ;;
+#    esac
+#  fi
+#
+#  # Bail if we did not detect it
+#  if [[ -z "${JAVA_HOME}" ]]; then
+#    echo "ERROR: JAVA_HOME is not set and could not be found." 1>&2
+#    exit 1
+#  fi
+#
+#  if [[ ! -d "${JAVA_HOME}" ]]; then
+#    echo "ERROR: JAVA_HOME (${JAVA_HOME}) does not exist." 1>&2
+#    exit 1
+#  fi
+#
+#  JAVA="${JAVA_HOME}/bin/java"
+#
+#  if [[ ! -x ${JAVA} ]]; then
+#    echo "ERROR: ${JAVA} is not executable." 1>&2
+#    exit 1
+#  fi
+#  JAVA_HEAP_MAX=-Xmx1g
+#  HADOOP_HEAPSIZE=${HADOOP_HEAPSIZE:-128}
+#
+#  # check envvars which might override default args
+#  if [[ -n "$HADOOP_HEAPSIZE" ]]; then
+#    JAVA_HEAP_MAX="-Xmx${HADOOP_HEAPSIZE}m"
+#  fi
+#}
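To opt in, a user would save an edited copy of this example file as ${HADOOP_CONF_DIR}/hadoop-user-functions.sh (the exact path hadoop_exec_userfuncs probes) and uncomment the desired block. A sketch, with the source path as a placeholder:

cp /path/to/the/shipped/example "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
vi "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"   # uncomment hadoop_rotate_log or hadoop_java_setup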
@@ -20,8 +20,11 @@
 
 function hadoop_subproject_init
 {
-  if [[ -e "${HADOOP_CONF_DIR}/hdfs-env.sh" ]]; then
-    . "${HADOOP_CONF_DIR}/hdfs-env.sh"
+  if [[ -z "${HADOOP_HDFS_ENV_PROCESSED}" ]]; then
+    if [[ -e "${HADOOP_CONF_DIR}/hdfs-env.sh" ]]; then
+      . "${HADOOP_CONF_DIR}/hdfs-env.sh"
+      export HADOOP_HDFS_ENV_PROCESSED=true
+    fi
   fi
 
   # at some point in time, someone thought it would be a good idea to
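The same sentinel pattern repeats for mapred and yarn below: because the flag is exported, child scripts inherit it, so an env file is processed at most once per process tree no matter how deeply the wrappers nest. The skeleton, with hypothetical names (MY_* is illustrative, not Hadoop code):

# idempotent sourcing: run once per process tree
if [[ -z "${MY_ENV_PROCESSED}" ]]; then
  if [[ -e "${MY_CONF_DIR}/my-env.sh" ]]; then
    . "${MY_CONF_DIR}/my-env.sh"
    export MY_ENV_PROCESSED=true
  fi
fi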
@@ -20,8 +20,11 @@
 
 function hadoop_subproject_init
 {
-  if [[ -e "${HADOOP_CONF_DIR}/mapred-env.sh" ]]; then
-    . "${HADOOP_CONF_DIR}/mapred-env.sh"
+  if [[ -z "${HADOOP_MAPRED_ENV_PROCESSED}" ]]; then
+    if [[ -e "${HADOOP_CONF_DIR}/mapred-env.sh" ]]; then
+      . "${HADOOP_CONF_DIR}/mapred-env.sh"
+      export HADOOP_MAPRED_ENV_PROCESSED=true
+    fi
   fi
 
   # at some point in time, someone thought it would be a good idea to
@@ -24,11 +24,14 @@ function hadoop_subproject_init
   # ...
   # this should get deprecated at some point.
 
-  if [[ -e "${YARN_CONF_DIR}/yarn-env.sh" ]]; then
-    . "${YARN_CONF_DIR}/yarn-env.sh"
-  elif [[ -e "${HADOOP_CONF_DIR}/yarn-env.sh" ]]; then
-    . "${HADOOP_CONF_DIR}/yarn-env.sh"
-  fi
+  if [[ -z "${HADOOP_YARN_ENV_PROCESSED}" ]]; then
+    if [[ -e "${YARN_CONF_DIR}/yarn-env.sh" ]]; then
+      . "${YARN_CONF_DIR}/yarn-env.sh"
+    elif [[ -e "${HADOOP_CONF_DIR}/yarn-env.sh" ]]; then
+      . "${HADOOP_CONF_DIR}/yarn-env.sh"
+    fi
+    export HADOOP_YARN_ENV_PROCESSED=true
+  fi
 
   if [[ -n "${YARN_CONF_DIR}" ]]; then
     HADOOP_CONF_DIR="${YARN_CONF_DIR}"