#! /usr/bin/env bash
#
#/**
# * Copyright 2007 The Apache Software Foundation
# *
# * Licensed to the Apache Software Foundation (ASF) under one
# * or more contributor license agreements.  See the NOTICE file
# * distributed with this work for additional information
# * regarding copyright ownership.  The ASF licenses this file
# * to you under the Apache License, Version 2.0 (the
# * "License"); you may not use this file except in compliance
# * with the License.  You may obtain a copy of the License at
# *
# *     http://www.apache.org/licenses/LICENSE-2.0
# *
# * Unless required by applicable law or agreed to in writing, software
# * distributed under the License is distributed on an "AS IS" BASIS,
# * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# * See the License for the specific language governing permissions and
# * limitations under the License.
# */
#
# The hbase command script.  Based on the hadoop command script putting
# in hbase classes, libs and configurations ahead of hadoop's.
#
# TODO: Narrow the amount of duplicated code.
#
# Environment Variables:
#
#   JAVA_HOME            The java implementation to use.  Overrides JAVA_HOME.
#
#   HBASE_HEAPSIZE       The maximum amount of heap to use, in MB.
#                        Default is 1000.
#
#   HBASE_OPTS           Extra Java runtime options.
#
#   HBASE_CONF_DIR       Alternate conf dir.  Default is ${HBASE_HOME}/conf.
#
#   HADOOP_CONF_DIR      Alternate conf dir.  Default is ${HADOOP_HOME}/conf.
#
#   HADOOP_HOME          Hadoop home directory.
#
#   HADOOP_ROOT_LOGGER   The root appender.  Default is INFO,console
#
# Resolve the directory this script lives in, following relative invocation.
bin=$(dirname "$0")
bin=$(cd "$bin"; pwd)

# This will set HBASE_HOME, HADOOP_HOME, etc.
. "$bin"/hbase-config.sh

# Detect Cygwin so paths can be converted to Windows form further down.
cygwin=false
case "$(uname)" in
  CYGWIN*) cygwin=true;;
esac

# if no args specified, show usage and bail out
if [ $# = 0 ]; then
  echo "Usage: hbase [--hadoop=hadoopdir] <command>"
  echo "where <command> is one of:"
  echo " shell run the Hbase shell"
  echo " master run an Hbase HMaster node"
  echo " regionserver run an Hbase HRegionServer node"
  echo " rest run an Hbase REST server"
  echo " thrift run an Hbase Thrift server"
  echo " migrate upgrade an hbase.rootdir"
  echo " or"
  echo " CLASSNAME run the class named CLASSNAME"
  echo "Most commands print help when invoked w/o parameters."
  exit 1
fi

# get arguments; everything after the command is forwarded to the JVM at exec
COMMAND=$1
shift

# Source the hadoop-env.sh.  Will have JAVA_HOME defined.  There is no
# hbase-env.sh as yet.
if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
  . "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi

# A usable JVM is mandatory; fail fast with a clear message if none was
# configured either in the environment or by hadoop-env.sh above.
if [ -z "$JAVA_HOME" ]; then
  echo "Error: JAVA_HOME is not set." >&2
  exit 1
fi

JAVA=$JAVA_HOME/bin/java
JAVA_HEAP_MAX=-Xmx1000m    # default max heap; overridden by HBASE_HEAPSIZE below

# check envvars which might override default args
# HBASE_HEAPSIZE is a plain number of megabytes, e.g. HBASE_HEAPSIZE=2000.
if [ -n "$HBASE_HEAPSIZE" ]; then
  JAVA_HEAP_MAX="-Xmx${HBASE_HEAPSIZE}m"
fi

# Append one entry to CLASSPATH unconditionally.
add_to_classpath() {
  CLASSPATH="${CLASSPATH}:$1"
}

# Append entry $2 to CLASSPATH only when directory $1 exists.  The tested
# directory and the appended entry can differ (webapp roots), hence two args.
add_to_classpath_if_dir() {
  if [ -d "$1" ]; then
    CLASSPATH="${CLASSPATH}:$2"
  fi
}

# CLASSPATH initially contains $HBASE_CONF_DIR
# Add HADOOP_CONF_DIR if its been defined.
if [ -n "$HADOOP_CONF_DIR" ]; then
  add_to_classpath "$HADOOP_CONF_DIR"
fi
add_to_classpath "$HBASE_CONF_DIR"
add_to_classpath "$JAVA_HOME/lib/tools.jar"

# for developers, add hbase and hadoop classes to CLASSPATH
add_to_classpath_if_dir "$HADOOP_HOME/build/contrib/hbase/classes" "$HADOOP_HOME/build/contrib/hbase/classes"
add_to_classpath_if_dir "$HADOOP_HOME/build/contrib/hbase/test" "$HADOOP_HOME/build/contrib/hbase/test"
add_to_classpath_if_dir "$HADOOP_HOME/build/classes" "$HADOOP_HOME/build/classes"
# note: for the webapps, the *parent* directory goes on the classpath
add_to_classpath_if_dir "$HADOOP_HOME/build/contrib/hbase/webapps" "$HADOOP_HOME/build/contrib/hbase/"
add_to_classpath_if_dir "$HADOOP_HOME/build/webapps" "$HADOOP_HOME/build"
add_to_classpath_if_dir "$HADOOP_HOME/build/test/classes" "$HADOOP_HOME/build/test/classes"

# so that filenames w/ spaces are handled correctly in loops below
IFS=

# for releases, add core hbase, hadoop jar & webapps to CLASSPATH
# Look in two places for our hbase jar.  The [ -f "$f" ] guards also protect
# against an unmatched glob leaving the literal pattern in $f.
for f in $HBASE_HOME/hadoop-*-hbase*.jar; do
  if [ -f "$f" ]; then
    CLASSPATH=${CLASSPATH}:$f
  fi
done
for f in $HADOOP_HOME/contrib/hadoop-*-hbase*.jar; do
  if [ -f "$f" ]; then
    CLASSPATH=${CLASSPATH}:$f
  fi
done

if [ -d "$HADOOP_HOME/webapps" ]; then
  CLASSPATH=${CLASSPATH}:$HADOOP_HOME
fi

for f in $HADOOP_HOME/hadoop-*.jar; do
  if [ -f "$f" ]; then
    CLASSPATH=${CLASSPATH}:$f
  fi
done

# add hbase and hadoop libs to CLASSPATH
for f in $HBASE_HOME/lib/*.jar; do
  if [ -f "$f" ]; then
    CLASSPATH=${CLASSPATH}:$f
  fi
done
for f in $HADOOP_HOME/lib/*.jar; do
  if [ -f "$f" ]; then
    CLASSPATH=${CLASSPATH}:$f
  fi
done
for f in $HADOOP_HOME/lib/jetty-ext/*.jar; do
  if [ -f "$f" ]; then
    CLASSPATH=${CLASSPATH}:$f
  fi
done

# default log directory & file
# TODO: Should we log to hadoop or under hbase?
if [ "$HADOOP_LOG_DIR" = "" ]; then
  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
fi
if [ "$HADOOP_LOGFILE" = "" ]; then
  HADOOP_LOGFILE='hbase.log'
fi

# cygwin path translation: convert Unix-style paths to Windows form so the
# JVM (a native Windows process under Cygwin) can understand them.
if $cygwin; then
  CLASSPATH=$(cygpath -p -w "$CLASSPATH")
  HADOOP_HOME=$(cygpath -d "$HADOOP_HOME")
  HBASE_HOME=$(cygpath -d "$HBASE_HOME")
  HADOOP_LOG_DIR=$(cygpath -d "$HADOOP_LOG_DIR")
fi

# TODO: Can this be put into separate script so don't have to duplicate
# hadoop command script code?
# setup 'java.library.path' for native-hadoop code if necessary
JAVA_LIBRARY_PATH=''
if [ -d "${HADOOP_HOME}/build/native" ] || [ -d "${HADOOP_HOME}/lib/native" ]; then
  # Ask the JVM which platform we are on; spaces become underscores so the
  # result is usable as a single path component.
  JAVA_PLATFORM=$(CLASSPATH=${CLASSPATH} ${JAVA} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g")

  if [ -d "$HADOOP_HOME/build/native" ]; then
    JAVA_LIBRARY_PATH=${HADOOP_HOME}/build/native/${JAVA_PLATFORM}/lib
  fi

  if [ -d "${HADOOP_HOME}/lib/native" ]; then
    if [ -n "$JAVA_LIBRARY_PATH" ]; then
      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
    else
      JAVA_LIBRARY_PATH=${HADOOP_HOME}/lib/native/${JAVA_PLATFORM}
    fi
  fi
fi

# cygwin path translation
if $cygwin; then
  JAVA_LIBRARY_PATH=$(cygpath -p "$JAVA_LIBRARY_PATH")
fi

# restore ordinary behaviour
unset IFS

# Map a user-facing command name to the Java class implementing it, storing
# the result in CLASS.  Anything unrecognized is assumed to be a
# fully-qualified class name and is passed through unchanged.
set_class() {
  case "$1" in
    shell)        CLASS='org.apache.hadoop.hbase.Shell' ;;
    master)       CLASS='org.apache.hadoop.hbase.HMaster' ;;
    regionserver) CLASS='org.apache.hadoop.hbase.HRegionServer' ;;
    rest)         CLASS='org.apache.hadoop.hbase.rest.Dispatcher' ;;
    thrift)       CLASS='org.apache.hadoop.hbase.thrift.ThriftServer' ;;
    migrate)      CLASS='org.apache.hadoop.hbase.util.Migrate' ;;
    *)            CLASS=$1 ;;
  esac
}

# figure out which class to run
set_class "$COMMAND"

# Have JVM dump heap if we run out of memory.  Files will be 'launch directory'
# and are named like the following: java_pid21612.hprof.  Apparently it doesn't
# 'cost' to have this flag enabled.  Its a 1.6 flag only.  See:
# http://blogs.sun.com/alanb/entry/outofmemoryerror_looks_a_bit_better
HBASE_OPTS="$HBASE_OPTS -XX:+HeapDumpOnOutOfMemoryError"

# Hand logging and home locations to the JVM as system properties.
HBASE_OPTS="$HBASE_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
HBASE_OPTS="$HBASE_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
HBASE_OPTS="$HBASE_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
HBASE_OPTS="$HBASE_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
HBASE_OPTS="$HBASE_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
HBASE_OPTS="$HBASE_OPTS -Dhbase.home.dir=$HBASE_HOME"
if [ -n "$JAVA_LIBRARY_PATH" ]; then
  HBASE_OPTS="$HBASE_OPTS -Djava.library.path=$JAVA_LIBRARY_PATH"
fi

# run it: exec replaces this shell with the JVM.  $JAVA_HEAP_MAX, $HBASE_OPTS
# and $CLASS are deliberately unquoted so they word-split into separate args.
exec "$JAVA" $JAVA_HEAP_MAX $HBASE_OPTS -classpath "$CLASSPATH" $CLASS "$@"