Merge -r 1203451:1203452 from trunk to branch-0.23. Fixes: HDFS-2544

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1203453 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 2011-11-18 00:54:28 +00:00
parent 3c49ccfa06
commit ab36c671c0
11 changed files with 34 additions and 11 deletions
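Every hunk below applies the same change: instead of unconditionally sourcing "$bin"/../libexec/hdfs-config.sh (or hadoop-config.sh), the scripts now honor an externally supplied HADOOP_LIBEXEC_DIR and fall back to the in-tree libexec directory only when it is unset. A minimal sketch of the resulting pattern, consolidated from the hunks below rather than copied verbatim from any single file:

    # Resolve the script's own directory, then let an exported HADOOP_LIBEXEC_DIR win;
    # otherwise default to the libexec directory next to the script.
    bin=`dirname "${BASH_SOURCE-$0}"`
    bin=`cd "$bin"; pwd`

    DEFAULT_LIBEXEC_DIR="$bin"/../libexec
    HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
    . $HADOOP_LIBEXEC_DIR/hdfs-config.sh

The ${VAR:-default} expansion is what makes the override possible: a caller (for example a distribution package that relocates the libexec scripts, which appears to be the packaging concern behind HDFS-2544) can export HADOOP_LIBEXEC_DIR before invoking any of the entry points, while an unset variable preserves the previous behaviour.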

View File

@@ -9,6 +9,9 @@ Release 0.23.1 - UNRELEASED
   IMPROVEMENTS
     HDFS-2560. Refactor BPOfferService to be a static inner class (todd)
+    HDFS-2544. Hadoop scripts unconditionally source
+    "$bin"/../libexec/hadoop-config.sh. (Bruno Mahé via tomwhite)
   OPTIMIZATIONS
     HDFS-2130. Switch default checksum to CRC32C. (todd)

View File

@@ -36,7 +36,9 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
-. "$bin/../libexec/hdfs-config.sh"
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 if [ "$1" = '' ] ; then
   "Error: please specify local exclude file as a first argument"

View File

@@ -19,7 +19,9 @@ bin=`which $0`
 bin=`dirname ${bin}`
 bin=`cd "$bin"; pwd`
-. "$bin"/../libexec/hdfs-config.sh
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 function print_usage(){
   echo "Usage: hdfs [--config confdir] COMMAND"

View File

@@ -24,8 +24,10 @@ bin=`cd "$bin"; pwd`
 export HADOOP_PREFIX="${HADOOP_PREFIX:-$bin/..}"
-if [ -e "$bin/../libexec/hadoop-config.sh" ]; then
-  . $bin/../libexec/hadoop-config.sh
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+if [ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]; then
+  . ${HADOOP_LIBEXEC_DIR}/hadoop-config.sh
 elif [ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]; then
   . "$HADOOP_COMMON_HOME"/libexec/hadoop-config.sh
 elif [ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]; then
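As a hedged usage sketch (the install path below is hypothetical and not part of the patch), the override these hunks introduce lets a relocated libexec directory be used without editing the scripts:

    export HADOOP_LIBEXEC_DIR=/usr/libexec/hadoop   # hypothetical packaged location of hadoop-config.sh / hdfs-config.sh
    start-dfs.sh                                    # the entry points below now source $HADOOP_LIBEXEC_DIR/hdfs-config.sh

With HADOOP_LIBEXEC_DIR unset, behaviour is unchanged: the script in this hunk still falls back to "$bin"/../libexec, then ${HADOOP_COMMON_HOME}/libexec, then ${HADOOP_HOME}/libexec.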

View File

@@ -23,7 +23,9 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
-. "$bin/../libexec/hdfs-config.sh"
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 namenodes=$("$HADOOP_PREFIX/bin/hdfs" getconf -nnRpcAddresses)
 if [ "$?" != '0' ] ; then errorFlag='1' ;

View File

@@ -18,7 +18,9 @@
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
-. "$bin"/../libexec/hdfs-config.sh
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 # Start balancer daemon.

View File

@@ -25,7 +25,9 @@ usage="Usage: start-dfs.sh [-upgrade|-rollback]"
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
-. "$bin"/../libexec/hdfs-config.sh
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 # get arguments
 if [ $# -ge 1 ]; then

View File

@@ -22,7 +22,9 @@ usage="Usage (run as root in order to start secure datanodes): start-secure-dns.
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
-. "$bin"/../libexec/hdfs-config.sh
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
   "$HADOOP_PREFIX"/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt

View File

@@ -18,7 +18,9 @@
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
-. "$bin"/../libexec/hdfs-config.sh
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 # Stop balancer daemon.
 # Run this on the machine where the balancer is running

View File

@@ -18,7 +18,9 @@
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
-. "$bin"/../libexec/hdfs-config.sh
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 #---------------------------------------------------------
 # namenodes

View File

@@ -22,7 +22,9 @@ usage="Usage (run as root in order to stop secure datanodes): stop-secure-dns.sh
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
-. "$bin"/../libexec/hdfs-config.sh
+DEFAULT_LIBEXEC_DIR="$bin"/../libexec
+HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
+. $HADOOP_LIBEXEC_DIR/hdfs-config.sh
 if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
   "$HADOOP_PREFIX"/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop datanode