HBASE-17385 Change usage documentation from bin/hbase to hbase in various tools

Signed-off-by: Enis Soztutar <enis@apache.org>
This commit is contained in:
Jan Hentschel 2016-12-29 16:31:52 +01:00 committed by Enis Soztutar
parent 001a26d404
commit 7572e96e3a
19 changed files with 25 additions and 25 deletions

View File

@@ -78,7 +78,7 @@ public class ZkAclReset extends Configured implements Tool {
 }
 private void printUsageAndExit() {
-System.err.printf("Usage: bin/hbase %s [options]%n", getClass().getName());
+System.err.printf("Usage: hbase %s [options]%n", getClass().getName());
 System.err.println(" where [options] are:");
 System.err.println(" -h|-help Show this help and exit.");
 System.err.println(" -set-acls Setup the hbase znode ACLs for a secure cluster");

View File

@@ -162,7 +162,7 @@ public abstract class AbstractHBaseTool implements Tool, Configurable {
 }
 protected void printUsage() {
-printUsage("bin/hbase " + getClass().getName() + " <options>", "Options:", "");
+printUsage("hbase " + getClass().getName() + " <options>", "Options:", "");
 }
 protected void printUsage(final String usageStr, final String usageHeader,

View File

@@ -84,9 +84,9 @@ public class RESTServer implements Constants {
 private static void printUsageAndExit(Options options, int exitCode) {
 HelpFormatter formatter = new HelpFormatter();
-formatter.printHelp("bin/hbase rest start", "", options,
+formatter.printHelp("hbase rest start", "", options,
 "\nTo run the REST server as a daemon, execute " +
-"bin/hbase-daemon.sh start|stop rest [--infoport <port>] [-p <port>] [-ro]\n", true);
+"hbase-daemon.sh start|stop rest [--infoport <port>] [-p <port>] [-ro]\n", true);
 System.exit(exitCode);
 }

View File

@@ -1406,7 +1406,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
 System.err.println(" running: 1 <= value <= 500");
 System.err.println("Examples:");
 System.err.println(" To run a single evaluation client:");
-System.err.println(" $ bin/hbase " + this.getClass().getName()
+System.err.println(" $ hbase " + this.getClass().getName()
 + " sequentialWrite 1");
 }

View File

@@ -211,7 +211,7 @@ public class CopyTable extends Configured implements Tool {
 System.err.println();
 System.err.println("Examples:");
 System.err.println(" To copy 'TestTable' to a cluster that uses replication for a 1 hour window:");
-System.err.println(" $ bin/hbase " +
+System.err.println(" $ hbase " +
 "org.apache.hadoop.hbase.mapreduce.CopyTable --starttime=1265875194289 --endtime=1265878794289 " +
 "--peer.adr=server1,server2,server3:2181:/hbase --families=myOldCf:myNewCf,cf2,cf3 TestTable ");
 System.err.println("For performance consider the following general option:\n"

View File

@@ -624,7 +624,7 @@ public class HashTable extends Configured implements Tool {
 System.err.println();
 System.err.println("Examples:");
 System.err.println(" To hash 'TestTable' in 32kB batches for a 1 hour window into 50 files:");
-System.err.println(" $ bin/hbase " +
+System.err.println(" $ hbase " +
 "org.apache.hadoop.hbase.mapreduce.HashTable --batchsize=32000 --numhashfiles=50"
 + " --starttime=1265875194289 --endtime=1265878794289 --families=cf2,cf3"
 + " TestTable /hashes/testTable");

View File

@@ -699,7 +699,7 @@ public class SyncTable extends Configured implements Tool {
 System.err.println("Examples:");
 System.err.println(" For a dry run SyncTable of tableA from a remote source cluster");
 System.err.println(" to a local target cluster:");
-System.err.println(" $ bin/hbase " +
+System.err.println(" $ hbase " +
 "org.apache.hadoop.hbase.mapreduce.SyncTable --dryrun=true"
 + " --sourcezkcluster=zk1.example.com,zk2.example.com,zk3.example.com:2181:/hbase"
 + " hdfs://nn:9000/hashes/tableA tableA tableA");

View File

@@ -540,7 +540,7 @@ public class VerifyReplication extends Configured implements Tool {
 System.err.println();
 System.err.println("Examples:");
 System.err.println(" To verify the data replicated from TestTable for a 1 hour window with peer #5 ");
-System.err.println(" $ bin/hbase " +
+System.err.println(" $ hbase " +
 "org.apache.hadoop.hbase.mapreduce.replication.VerifyReplication" +
 " --starttime=1265875194289 --endtime=1265878794289 5 TestTable ");
 }

View File

@@ -460,10 +460,10 @@ public class CompactionTool extends Configured implements Tool {
 System.err.println();
 System.err.println("Examples:");
 System.err.println(" To compact the full 'TestTable' using MapReduce:");
-System.err.println(" $ bin/hbase " + this.getClass().getName() + " -mapred hdfs:///hbase/data/default/TestTable");
+System.err.println(" $ hbase " + this.getClass().getName() + " -mapred hdfs:///hbase/data/default/TestTable");
 System.err.println();
 System.err.println(" To compact column family 'x' of the table 'TestTable' region 'abc':");
-System.err.println(" $ bin/hbase " + this.getClass().getName() + " hdfs:///hbase/data/default/TestTable/abc/x");
+System.err.println(" $ hbase " + this.getClass().getName() + " hdfs:///hbase/data/default/TestTable/abc/x");
 }
 public static void main(String[] args) throws Exception {

View File

@@ -88,7 +88,7 @@ public class HRegionServerCommandLine extends ServerCommandLine {
 } else if ("stop".equals(cmd)) {
 System.err.println(
 "To shutdown the regionserver run " +
-"bin/hbase-daemon.sh stop regionserver or send a kill signal to " +
+"hbase-daemon.sh stop regionserver or send a kill signal to " +
 "the regionserver pid");
 return 1;
 } else {

View File

@@ -121,7 +121,7 @@ public class TableCFsUpdater extends ReplicationStateZKBase {
 }
 private static void printUsageAndExit() {
-System.err.printf("Usage: bin/hbase org.apache.hadoop.hbase.replication.master.TableCFsUpdater [options]");
+System.err.printf("Usage: hbase org.apache.hadoop.hbase.replication.master.TableCFsUpdater [options]");
 System.err.println(" where [options] are:");
 System.err.println(" -h|-help Show this help and exit.");
 System.err.println(" update Copy table-cfs to replication peer config");

View File

@@ -172,7 +172,7 @@ public class DumpReplicationQueues extends Configured implements Tool {
 if (message != null && message.length() > 0) {
 System.err.println(message);
 }
-System.err.println("Usage: bin/hbase " + className + " \\");
+System.err.println("Usage: hbase " + className + " \\");
 System.err.println(" <OPTIONS> [-D<property=value>]*");
 System.err.println();
 System.err.println("General Options:");

View File

@@ -541,7 +541,7 @@ public final class SnapshotInfo extends AbstractHBaseTool {
 @Override
 protected void printUsage() {
-printUsage("bin/hbase snapshot info [options]", "Options:", "");
+printUsage("hbase snapshot info [options]", "Options:", "");
 System.err.println("Examples:");
 System.err.println(" hbase snapshot info --snapshot MySnapshot --files");
 }

View File

@@ -776,7 +776,7 @@ public final class Canary implements Tool {
 private void printUsageAndExit() {
 System.err.printf(
-"Usage: bin/hbase %s [opts] [table1 [table2]...] | [regionserver1 [regionserver2]..]%n",
+"Usage: hbase %s [opts] [table1 [table2]...] | [regionserver1 [regionserver2]..]%n",
 getClass().getName());
 System.err.println(" where [opts] are:");
 System.err.println(" -help Show this help and exit.");

View File

@@ -247,7 +247,7 @@ public class Merge extends Configured implements Tool {
 private void usage() {
 System.err
-.println("For hadoop 0.21+, Usage: bin/hbase org.apache.hadoop.hbase.util.Merge "
+.println("For hadoop 0.21+, Usage: hbase org.apache.hadoop.hbase.util.Merge "
 + "[-Dfs.defaultFS=hdfs://nn:port] <table-name> <region-1> <region-2>\n");
 }

View File

@@ -1914,9 +1914,9 @@ public class PerformanceEvaluation extends Configured implements Tool {
 + "(and HRegionServers) running. 1 <= value <= 500");
 System.err.println("Examples:");
 System.err.println(" To run a single client doing the default 1M sequentialWrites:");
-System.err.println(" $ bin/hbase " + className + " sequentialWrite 1");
+System.err.println(" $ hbase " + className + " sequentialWrite 1");
 System.err.println(" To run 10 clients doing increments over ten rows:");
-System.err.println(" $ bin/hbase " + className + " --rows=10 --nomapred increment 10");
+System.err.println(" $ hbase " + className + " --rows=10 --nomapred increment 10");
 }
 /**
/**

View File

@@ -455,7 +455,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
 }
 private void printUsageAndExit() {
-System.err.printf("Usage: bin/hbase %s [options]\n", getClass().getName());
+System.err.printf("Usage: hbase %s [options]\n", getClass().getName());
 System.err.println(" where [options] are:");
 System.err.println(" -h|-help Show this help and exit.");
 System.err.println(" -threads <N> Number of threads writing on the WAL.");
@@ -483,7 +483,7 @@ public final class WALPerformanceEvaluation extends Configured implements Tool {
 System.err.println("");
 System.err.println(" To run 100 threads on hdfs with log rolling every 10k edits and " +
 "verification afterward do:");
-System.err.println(" $ ./bin/hbase org.apache.hadoop.hbase.wal." +
+System.err.println(" $ hbase org.apache.hadoop.hbase.wal." +
 "WALPerformanceEvaluation \\");
 System.err.println(" -conf ./core-site.xml -path hdfs://example.org:7000/tmp " +
 "-threads 100 -roll 10000 -verify");

View File

@@ -78,8 +78,8 @@ public class ThriftServer {
 throws ExitCodeException {
 HelpFormatter formatter = new HelpFormatter();
 formatter.printHelp("Thrift", null, options,
-"To start the Thrift server run 'bin/hbase-daemon.sh start thrift'\n" +
-"To shutdown the thrift server run 'bin/hbase-daemon.sh stop " +
+"To start the Thrift server run 'hbase-daemon.sh start thrift'\n" +
+"To shutdown the thrift server run 'hbase-daemon.sh stop " +
 "thrift' or send a kill signal to the thrift server pid",
 true);
 throw new ExitCodeException(exitCode, "");

View File

@@ -131,8 +131,8 @@ public class ThriftServer extends Configured implements Tool {
 private static void printUsage() {
 HelpFormatter formatter = new HelpFormatter();
 formatter.printHelp("Thrift", null, getOptions(),
-"To start the Thrift server run 'bin/hbase-daemon.sh start thrift2'\n" +
-"To shutdown the thrift server run 'bin/hbase-daemon.sh stop thrift2' or" +
+"To start the Thrift server run 'hbase-daemon.sh start thrift2'\n" +
+"To shutdown the thrift server run 'hbase-daemon.sh stop thrift2' or" +
 " send a kill signal to the thrift server pid",
 true);
 }