HBASE-21536 Fix completebulkload usage instructions

Signed-off-by: Peter Somogyi <psomogyi@apache.org>
Artem Ervits 2019-05-09 16:50:02 -04:00 committed by Peter Somogyi
parent 0e79e70545
commit 420fbba6ae
3 changed files with 41 additions and 35 deletions


@@ -115,6 +115,7 @@ if [ $# = 0 ]; then
 echo " version Print the version"
 echo " backup Backup tables for recovery"
 echo " restore Restore tables from existing backup image"
+echo " completebulkload Run BulkLoadHFiles tool"
 echo " regionsplitter Run RegionSplitter tool"
 echo " rowcounter Run RowCounter tool"
 echo " cellcounter Run CellCounter tool"
@@ -645,6 +646,8 @@ elif [ "$COMMAND" = "cellcounter" ] ; then
 CLASS='org.apache.hadoop.hbase.mapreduce.CellCounter'
 elif [ "$COMMAND" = "pre-upgrade" ] ; then
 CLASS='org.apache.hadoop.hbase.tool.PreUpgradeValidator'
+elif [ "$COMMAND" = "completebulkload" ] ; then
+CLASS='org.apache.hadoop.hbase.tool.BulkLoadHFilesTool'
 else
 CLASS=$COMMAND
 fi
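
For reference, a minimal sketch of how the new subcommand could be invoked once this mapping is in place, following the usage string added in LoadIncrementalHFiles below; the output directory and table name are placeholders rather than values from this change:

----
# Hypothetical example: bulk-load previously generated HFiles into a table
bin/hbase completebulkload /path/to/hfileoutputformat-output mytable
----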


@@ -182,13 +182,16 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
 }
 private void usage() {
-System.err.println("usage: " + NAME + " /path/to/hfileoutputformat-output tablename -loadTable"
-+ "\n -D" + CREATE_TABLE_CONF_KEY + "=no - can be used to avoid creation of table by "
-+ "this tool\n Note: if you set this to 'no', then the target table must already exist "
-+ "in HBase\n -loadTable implies your baseDirectory to store file has a depth of 3 ,you"
-+ " must have an existing table\n-D" + IGNORE_UNMATCHED_CF_CONF_KEY + "=yes - can be used "
-+ "to ignore unmatched column families\n" +
-"\n");
+System.err.println("usage: " + "bin/hbase completebulkload <-Dargs> "
++ "</path/to/hfileoutputformat-output> <tablename>\n"
++ "\t-D" + CREATE_TABLE_CONF_KEY + "=no can be used to avoid creation "
++ "of a table by this tool.\n"
++ "\t Note: if you set this to 'no', then target table must already exist.\n"
++ "\t-D" + IGNORE_UNMATCHED_CF_CONF_KEY + "=yes can be used to ignore "
++ "unmatched column families.\n"
++ "\t-loadTable switch implies your baseDirectory to store file has a "
++ "depth of 3, table must exist\n"
++ "\t and -loadTable switch is the last option on the command line.\n\n");
 }
 /**
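
A hedged sketch of the invocation described by the new usage message, with both -D options shown. The key names create.table and ignore.unmatched.families are assumed values of CREATE_TABLE_CONF_KEY and IGNORE_UNMATCHED_CF_CONF_KEY, and the path and table name are placeholders:

----
# Sketch only: the -D key names, path, and table name are assumptions, not taken from this diff
bin/hbase completebulkload \
  -Dcreate.table=no \
  -Dignore.unmatched.families=yes \
  /path/to/hfileoutputformat-output mytable
----

As the Note in the usage text states, with create.table=no the target table must already exist before the tool runs.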


@@ -841,7 +841,7 @@ $ bin/hbase org.apache.hadoop.hbase.tool.LoadIncrementalHFiles <hdfs://storefileoutput> <tablename>
 .Driver
 ----
-HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath` ${HADOOP_HOME}/bin/hadoop jar ${HBASE_HOME}/hbase-server-VERSION.jar completebulkload <hdfs://storefileoutput> <tablename>
+HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath` ${HADOOP_HOME}/bin/hadoop jar ${HBASE_HOME}/hbase-mapreduce-VERSION.jar completebulkload <hdfs://storefileoutput> <tablename>
 ----
 [[completebulkload.warning]]