#!/usr/bin/env bash

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Start hadoop dfs daemons.
# Optionally upgrade or rollback dfs state.
# Run this on master node.

usage="Usage: start-dfs.sh [-upgrade|-rollback]"
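# Examples (illustrative):
#   start-dfs.sh              # start the namenode, datanodes and secondary namenode
#   start-dfs.sh -upgrade     # start the namenode with the -upgrade option
#   start-dfs.sh -rollback    # start the namenode and datanodes with the -rollback option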
bin=`dirname "${BASH_SOURCE-$0}"`
bin=`cd "$bin"; pwd`
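# hdfs-config.sh is expected to set up the HDFS environment used below,
# e.g. HADOOP_CONF_DIR and HADOOP_CORE_HOME (assumed from their use in this script)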
. "$bin"/hdfs-config.sh
# get arguments
if [ $# -ge 1 ]; then
  nameStartOpt=$1
  shift
  case $nameStartOpt in
    (-upgrade)
      # -upgrade is passed to the namenode only (via nameStartOpt)
      ;;
    (-rollback)
      # -rollback is passed to the datanodes as well as the namenode
      dataStartOpt=$nameStartOpt
      ;;
    (*)
      echo "$usage"
      exit 1
      ;;
  esac
fi
# start dfs daemons
# the namenode is started first, then the datanodes, then the secondary namenode
# note: datanodes may log connection errors until the namenode is up and ready
"$HADOOP_CORE_HOME"/bin/hadoop-daemon.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start namenode $nameStartOpt
"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --script "$bin"/hdfs start datanode $dataStartOpt
"$HADOOP_CORE_HOME"/bin/hadoop-daemons.sh --config $HADOOP_CONF_DIR --hosts masters --script "$bin"/hdfs start secondarynamenode