#!/bin/sh

# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Create an HBase AMI. Runs on the EC2 instance.

# Import variables
bin=`dirname "$0"`
bin=`cd "$bin"; pwd`
. "$bin"/hbase-ec2-env.sh
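
# Prefer the instance type and architecture passed in via the environment;
# fall back to the slave settings when they are unset.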
type=$INSTANCE_TYPE
[ -z "$type" ] && type=$SLAVE_INSTANCE_TYPE
arch=$ARCH
[ -z "$arch" ] && arch=$SLAVE_ARCH

echo "Remote: INSTANCE_TYPE is $type"
echo "Remote: ARCH is $arch"

# Remove sensitive information so it does not end up in the bundled image
rm -f "$bin"/hbase-ec2-env.sh
rm -f "$bin"/credentials.sh

# Install Java
echo "Downloading and installing java binary."
cd /usr/local
wget -nv -O java.bin http://hbase.s3.amazonaws.com/jdk/jdk-${JAVA_VERSION}-linux-${arch}.bin
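# The self-extracting archive unpacks into /usr/local/jdk${JAVA_VERSION},
# which is assumed below wherever JAVA_HOME is set.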
sh java.bin
rm -f java.bin

# Install tools
echo "Installing rpms."
yum -y update
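# The Ganglia packages provide cluster monitoring, lzo-devel backs the LZO
# codec installed below, and xfsprogs is presumably here for formatting
# attached storage volumes.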
yum -y install rsync lynx screen ganglia-gmetad ganglia-gmond ganglia-web httpd php lzo-devel xfsprogs
yum -y clean all

# Install Hadoop
echo "Installing Hadoop $HADOOP_VERSION."
cd /usr/local
wget -nv http://archive.apache.org/dist/hadoop/core/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz
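# Fall back to the main Apache distribution site if the release was not
# found on the archive server.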
[ ! -f hadoop-$HADOOP_VERSION.tar.gz ] && wget -nv http://www.apache.org/dist/hadoop/core/hadoop-$HADOOP_VERSION/hadoop-$HADOOP_VERSION.tar.gz
tar xzf hadoop-$HADOOP_VERSION.tar.gz
rm -f hadoop-$HADOOP_VERSION.tar.gz

# Configure Hadoop
sed -i \
  -e "s|# export JAVA_HOME=.*|export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}|" \
  -e 's|# export HADOOP_LOG_DIR=.*|export HADOOP_LOG_DIR=/mnt/hadoop/logs|' \
  -e 's|# export HADOOP_SLAVE_SLEEP=.*|export HADOOP_SLAVE_SLEEP=1|' \
  -e 's|# export HADOOP_OPTS=.*|export HADOOP_OPTS=-server|' \
    /usr/local/hadoop-$HADOOP_VERSION/conf/hadoop-env.sh

# Install HBase
echo "Installing HBase $HBASE_VERSION."
cd /usr/local
wget -nv http://hbase.s3.amazonaws.com/hbase/hbase-$HBASE_VERSION.tar.gz
tar xzf hbase-$HBASE_VERSION.tar.gz
rm -f hbase-$HBASE_VERSION.tar.gz

# Configure HBase
sed -i \
  -e "s|# export JAVA_HOME=.*|export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}|" \
  -e 's|# export HBASE_OPTS=.*|export HBASE_OPTS="$HBASE_OPTS -server -XX:+HeapDumpOnOutOfMemoryError"|' \
  -e 's|# export HBASE_LOG_DIR=.*|export HBASE_LOG_DIR=/mnt/hbase/logs|' \
  -e 's|# export HBASE_SLAVE_SLEEP=.*|export HBASE_SLAVE_SLEEP=1|' \
    /usr/local/hbase-$HBASE_VERSION/conf/hbase-env.sh

# Run user data as script on instance startup
chmod +x /etc/init.d/ec2-run-user-data
echo "/etc/init.d/ec2-run-user-data" >> /etc/rc.d/rc.local

# Setup root user bash environment
echo "export JAVA_HOME=/usr/local/jdk${JAVA_VERSION}" >> /root/.bash_profile
echo "export HADOOP_HOME=/usr/local/hadoop-${HADOOP_VERSION}" >> /root/.bash_profile
echo "export HBASE_HOME=/usr/local/hbase-${HBASE_VERSION}" >> /root/.bash_profile
echo 'export PATH=$JAVA_HOME/bin:$HADOOP_HOME/bin:$HBASE_HOME/bin:$PATH' >> /root/.bash_profile

# Configure networking.
# Delete SSH authorized_keys since it includes the key it was launched with. (Note that it is re-populated when an instance starts.)
rm -f /root/.ssh/authorized_keys
# Ensure logging in to new hosts is seamless.
echo ' StrictHostKeyChecking no' >> /etc/ssh/ssh_config
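# (This disables host key verification for all outbound SSH from cluster
# nodes, trading MITM protection for convenience.)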

# Install LZO
echo "Installing LZO codec support"
wget -nv -O /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz http://hbase.s3.amazonaws.com/hbase/lzo-linux-${HADOOP_VERSION}.tar.gz
cd /usr/local/hadoop-${HADOOP_VERSION} && tar xzf /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz
cd /usr/local/hbase-${HBASE_VERSION} && tar xzf /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz
rm -f /tmp/lzo-linux-${HADOOP_VERSION}.tar.gz

# Bundle and upload image
cd ~root
# Don't need to delete .bash_history since it isn't written until exit.
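# Log disk usage before bundling; the bundle is written to /mnt and needs
# free space there.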
df -h
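# Bundle the root volume into a 3 GB (-s 3072 MB) image under /mnt, signed
# with the private key and certificate that were copied to /mnt beforehand.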
ec2-bundle-vol -d /mnt -k /mnt/pk*.pem -c /mnt/cert*.pem -u $AWS_ACCOUNT_ID -s 3072 -p hbase-$HBASE_VERSION-$arch -r $arch
ec2-upload-bundle -b $S3_BUCKET -m /mnt/hbase-$HBASE_VERSION-$arch.manifest.xml -a $AWS_ACCESS_KEY_ID -s $AWS_SECRET_ACCESS_KEY
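# The uploaded bundle still has to be registered as an AMI (ec2-register)
# before it can be launched; that presumably happens on the controlling host.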

# End
echo Done