Merge -r 1170232:1170233 from trunk to branch. Fixes: HDFS-2323.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-0.23@1170241 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Thomas White 2011-09-13 17:05:02 +00:00
parent 1c3e1213e6
commit 3f6ae51a7a
6 changed files with 13 additions and 9 deletions

View File

@ -1007,6 +1007,7 @@ Release 0.23.0 - Unreleased
HDFS-2314. MRV1 test compilation broken after HDFS-2197 (todd)
HDFS-2323. start-dfs.sh script fails for tarball install (tomwhite)
BREAKDOWN OF HDFS-1073 SUBTASKS

View File

@ -51,7 +51,7 @@ NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -namenodes)
echo "Starting namenodes on [$NAMENODES]"
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
--config "$HADOOP_CONF_DIR" \
--hostnames "$NAMENODES" \
--script "$bin/hdfs" start namenode $nameStartOpt
@ -64,7 +64,7 @@ if [ -n "$HADOOP_SECURE_DN_USER" ]; then
"Attempting to start secure cluster, skipping datanodes. " \
"Run start-secure-dns.sh as root to complete startup."
else
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
--config "$HADOOP_CONF_DIR" \
--script "$bin/hdfs" start datanode $dataStartOpt
fi
@ -84,7 +84,7 @@ if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
else
echo "Starting secondary namenodes [$SECONDARY_NAMENODES]"
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
--config "$HADOOP_CONF_DIR" \
--hostnames "$SECONDARY_NAMENODES" \
--script "$bin/hdfs" start secondarynamenode

View File

@ -25,7 +25,7 @@ bin=`cd "$bin"; pwd`
. "$bin"/../libexec/hdfs-config.sh
if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
"$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
"$HADOOP_PREFIX"/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
else
echo $usage
fi

View File

@ -27,7 +27,7 @@ NAMENODES=$($HADOOP_PREFIX/bin/hdfs getconf -namenodes)
echo "Stopping namenodes on [$NAMENODES]"
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
--config "$HADOOP_CONF_DIR" \
--hostnames "$NAMENODES" \
--script "$bin/hdfs" stop namenode
@ -40,7 +40,7 @@ if [ -n "$HADOOP_SECURE_DN_USER" ]; then
"Attempting to stop secure cluster, skipping datanodes. " \
"Run stop-secure-dns.sh as root to complete shutdown."
else
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
--config "$HADOOP_CONF_DIR" \
--script "$bin/hdfs" stop datanode
fi
@ -60,7 +60,7 @@ if [ "$SECONDARY_NAMENODES" = '0.0.0.0' ] ; then
else
echo "Stopping secondary namenodes [$SECONDARY_NAMENODES]"
"$HADOOP_PREFIX/bin/hadoop-daemons.sh" \
"$HADOOP_PREFIX/sbin/hadoop-daemons.sh" \
--config "$HADOOP_CONF_DIR" \
--hostnames "$SECONDARY_NAMENODES" \
--script "$bin/hdfs" stop secondarynamenode

View File

@ -25,7 +25,7 @@ bin=`cd "$bin"; pwd`
. "$bin"/../libexec/hdfs-config.sh
if [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
"$HADOOP_PREFIX"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop datanode
"$HADOOP_PREFIX"/sbin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs stop datanode
else
echo $usage
fi

View File

@ -227,7 +227,10 @@ public class GetConf extends Configured implements Tool {
/**
 * Prints the host names of the given addresses on a single line,
 * separated by single spaces (no leading or trailing separator).
 *
 * @param list addresses whose host names are printed; an empty list
 *             produces an empty line
 */
void printList(List<InetSocketAddress> list) {
  StringBuilder buffer = new StringBuilder();
  for (InetSocketAddress address : list) {
    // Prepend the separator only between entries so the output has
    // no leading/trailing space (HDFS-2323: trailing space broke
    // hostname parsing in start-dfs.sh).
    if (buffer.length() > 0) {
      buffer.append(" ");
    }
    buffer.append(address.getHostName());
  }
  printOut(buffer.toString());
}