HADOOP-18359. Update commons-cli from 1.2 to 1.5. (#5095). Contributed by Shilun Fan.

Signed-off-by: Ayush Saxena <ayushsaxena@apache.org>
Author: slfan1989 (2023-05-10 04:12:12 +08:00), committed by GitHub
Parent: 03bf8f982a
Commit: a2dda0ce03
23 changed files with 383 additions and 432 deletions
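
Every file below applies the same mechanical translation: the static, stateful OptionBuilder API, deprecated since commons-cli 1.3, is replaced by the per-instance Option.builder() fluent API, with withArgName/withDescription/withLongOpt/create mapping to argName/desc/longOpt/build. A minimal before/after sketch of the pattern (illustrative only, not taken verbatim from any one file in this commit):

    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;

    public class BuilderMigrationSketch {
      public static void main(String[] args) {
        // commons-cli 1.2 style removed by this patch (deprecated since 1.3):
        //   Option conf = OptionBuilder.withArgName("configuration file")
        //       .hasArg()
        //       .withDescription("specify an application configuration file")
        //       .withLongOpt("conf")
        //       .create("c");
        // commons-cli 1.5 style used throughout the diff:
        Option conf = Option.builder("c")
            .argName("configuration file")
            .hasArg()
            .desc("specify an application configuration file")
            .longOpt("conf")
            .build();
        Options opts = new Options().addOption(conf);
        System.out.println(opts.getOption("c").getDescription());
      }
    }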


@ -245,7 +245,7 @@ com.squareup.okhttp3:okhttp:4.10.0
com.squareup.okio:okio:3.2.0 com.squareup.okio:okio:3.2.0
com.zaxxer:HikariCP:4.0.3 com.zaxxer:HikariCP:4.0.3
commons-beanutils:commons-beanutils:1.9.4 commons-beanutils:commons-beanutils:1.9.4
commons-cli:commons-cli:1.2 commons-cli:commons-cli:1.5.0
commons-codec:commons-codec:1.11 commons-codec:commons-codec:1.11
commons-collections:commons-collections:3.2.2 commons-collections:commons-collections:3.2.2
commons-daemon:commons-daemon:1.0.13 commons-daemon:commons-daemon:1.0.13


@ -33,7 +33,6 @@ import org.slf4j.LoggerFactory;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.audit.CommonAuditContext; import org.apache.hadoop.fs.audit.CommonAuditContext;
@ -362,29 +361,28 @@ public class ServiceLauncher<S extends Service>
/** /**
* Override point: create an options instance to combine with the * Override point: create an options instance to combine with the
* standard options set. * standard options set.
* <i>Important. Synchronize uses of {@link OptionBuilder}</i> * <i>Important. Synchronize uses of {@link Option}</i>
* with {@code OptionBuilder.class} * with {@code Option.class}
* @return the new options * @return the new options
*/ */
@SuppressWarnings("static-access") @SuppressWarnings("static-access")
protected Options createOptions() { protected Options createOptions() {
synchronized (OptionBuilder.class) { synchronized (Option.class) {
Options options = new Options(); Options options = new Options();
-      Option oconf = OptionBuilder.withArgName("configuration file")
-        .hasArg()
-        .withDescription("specify an application configuration file")
-        .withLongOpt(ARG_CONF)
-        .create(ARG_CONF_SHORT);
-      Option confclass = OptionBuilder.withArgName("configuration classname")
-        .hasArg()
-        .withDescription(
-            "Classname of a Hadoop Configuration subclass to load")
-        .withLongOpt(ARG_CONFCLASS)
-        .create(ARG_CONFCLASS_SHORT);
-      Option property = OptionBuilder.withArgName("property=value")
-        .hasArg()
-        .withDescription("use value for given property")
-        .create('D');
+      Option oconf = Option.builder(ARG_CONF_SHORT).argName("configuration file")
+        .hasArg()
+        .desc("specify an application configuration file")
+        .longOpt(ARG_CONF)
+        .build();
+      Option confclass = Option.builder(ARG_CONFCLASS_SHORT).argName("configuration classname")
+        .hasArg()
+        .desc("Classname of a Hadoop Configuration subclass to load")
+        .longOpt(ARG_CONFCLASS)
+        .build();
+      Option property = Option.builder("D").argName("property=value")
+        .hasArg()
+        .desc("use value for given property")
+        .build();
options.addOption(oconf); options.addOption(oconf);
options.addOption(property); options.addOption(property);
options.addOption(confclass); options.addOption(confclass);
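
createOptions() is documented as an override point, so a subclass that adds its own flag on top of the migrated code would look roughly like the following sketch (the class name, the -v/--verbose flag, and the single-argument constructor are assumptions for illustration, not part of the patch):

    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.hadoop.service.Service;
    import org.apache.hadoop.service.launcher.ServiceLauncher;

    // Hypothetical subclass, only to show the override point with the 1.5 builder API.
    public class VerboseServiceLauncher<S extends Service> extends ServiceLauncher<S> {

      public VerboseServiceLauncher(String serviceClassName) {
        super(serviceClassName); // constructor signature assumed
      }

      @Override
      protected Options createOptions() {
        Options options = super.createOptions();
        options.addOption(Option.builder("v")      // hypothetical flag
            .longOpt("verbose")
            .desc("print verbose launch information")
            .build());
        return options;
      }
    }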


@ -46,7 +46,6 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.MissingArgumentException; import org.apache.commons.cli.MissingArgumentException;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
@ -230,8 +229,8 @@ public final class ConfTest {
GenericOptionsParser genericParser = new GenericOptionsParser(args); GenericOptionsParser genericParser = new GenericOptionsParser(args);
String[] remainingArgs = genericParser.getRemainingArgs(); String[] remainingArgs = genericParser.getRemainingArgs();
Option conf = OptionBuilder.hasArg().create("conffile"); Option conf = Option.builder("conffile").hasArg().build();
Option help = OptionBuilder.withLongOpt("help").create('h'); Option help = Option.builder("h").longOpt("help").hasArg().build();
Options opts = new Options().addOption(conf).addOption(help); Options opts = new Options().addOption(conf).addOption(help);
CommandLineParser specificParser = new GnuParser(); CommandLineParser specificParser = new GnuParser();
CommandLine cmd = null; CommandLine cmd = null;
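
ConfTest keeps GnuParser, which commons-cli has deprecated since 1.3; the TFile benchmarks later in this commit switch to DefaultParser, the drop-in replacement. A small standalone sketch of that swap against the conffile option built above (not part of the patch):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class ParserSwapSketch {
      public static void main(String[] args) throws ParseException {
        Options opts = new Options()
            .addOption(Option.builder("conffile").hasArg().build());
        // GnuParser still compiles against 1.5 but is deprecated; DefaultParser replaces it.
        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(opts, new String[] {"-conffile", "core-site.xml"});
        System.out.println(cmd.getOptionValue("conffile")); // core-site.xml
      }
    }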


@ -32,7 +32,6 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
@ -225,51 +224,50 @@ public class GenericOptionsParser {
/** /**
* @return Specify properties of each generic option. * @return Specify properties of each generic option.
* <i>Important</i>: as {@link OptionBuilder} is not thread safe, subclasses * <i>Important</i>: as {@link Option} is not thread safe, subclasses
* must synchronize use on {@code OptionBuilder.class} * must synchronize use on {@code Option.class}
* @param opts input opts. * @param opts input opts.
*/ */
@SuppressWarnings("static-access") @SuppressWarnings("static-access")
protected Options buildGeneralOptions(Options opts) { protected Options buildGeneralOptions(Options opts) {
synchronized (OptionBuilder.class) { synchronized (Option.class) {
Option fs = OptionBuilder.withArgName("file:///|hdfs://namenode:port") Option fs = Option.builder("fs").argName("file:///|hdfs://namenode:port")
.hasArg() .hasArg()
.withDescription("specify default filesystem URL to use, " .desc("specify default filesystem URL to use, "
+ "overrides 'fs.defaultFS' property from configurations.") + "overrides 'fs.defaultFS' property from configurations.")
.create("fs"); .build();
Option jt = OptionBuilder.withArgName("local|resourcemanager:port") Option jt = Option.builder("jt").argName("local|resourcemanager:port")
.hasArg() .hasArg()
.withDescription("specify a ResourceManager") .desc("specify a ResourceManager")
.create("jt"); .build();
Option oconf = OptionBuilder.withArgName("configuration file") Option oconf = Option.builder("conf").argName("configuration file")
.hasArg() .hasArg()
.withDescription("specify an application configuration file") .desc("specify an application configuration file")
.create("conf"); .build();
Option property = OptionBuilder.withArgName("property=value") Option property = Option.builder("D").argName("property=value")
.hasArg() .hasArg()
.withDescription("use value for given property") .desc("use value for given property")
.create('D'); .build();
-      Option libjars = OptionBuilder.withArgName("paths")
-      .hasArg()
-      .withDescription(
-          "comma separated jar files to include in the classpath.")
-      .create("libjars");
-      Option files = OptionBuilder.withArgName("paths")
-      .hasArg()
-      .withDescription("comma separated files to be copied to the " +
-             "map reduce cluster")
-      .create("files");
+      Option libjars = Option.builder("libjars").argName("paths")
+      .hasArg()
+      .desc("comma separated jar files to include in the classpath.")
+      .build();
+      Option files = Option.builder("files").argName("paths")
+      .hasArg()
+      .desc("comma separated files to be copied to the " +
+             "map reduce cluster")
+      .build();
Option archives = OptionBuilder.withArgName("paths") Option archives = Option.builder("archives").argName("paths")
.hasArg() .hasArg()
.withDescription("comma separated archives to be unarchived" + .desc("comma separated archives to be unarchived" +
" on the compute machines.") " on the compute machines.")
.create("archives"); .build();
// file with security tokens // file with security tokens
Option tokensFile = OptionBuilder.withArgName("tokensFile") Option tokensFile = Option.builder("tokenCacheFile").argName("tokensFile")
.hasArg() .hasArg()
.withDescription("name of the file with the tokens") .desc("name of the file with the tokens")
.create("tokenCacheFile"); .build();
opts.addOption(fs); opts.addOption(fs);
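
For context on how an Options set built this way behaves, a rough standalone sketch of parsing two of the options defined above (the parser choice and the argument values are illustrative; GenericOptionsParser itself wires the parsing elsewhere):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class GenericOptionsSketch {
      public static void main(String[] args) throws ParseException {
        Options opts = new Options()
            .addOption(Option.builder("D").argName("property=value").hasArg()
                .desc("use value for given property").build())
            .addOption(Option.builder("libjars").argName("paths").hasArg()
                .desc("comma separated jar files to include in the classpath.").build());

        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(opts,
            new String[] {"-D", "fs.defaultFS=hdfs://nn:8020", "-libjars", "a.jar,b.jar"});

        System.out.println(cmd.getOptionValue("D"));        // fs.defaultFS=hdfs://nn:8020
        System.out.println(cmd.getOptionValue("libjars"));  // a.jar,b.jar
      }
    }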


@ -29,10 +29,9 @@ import org.junit.Test;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -272,7 +271,7 @@ public class TestTFileSeek {
try { try {
Options opts = buildOptions(); Options opts = buildOptions();
CommandLineParser parser = new GnuParser(); CommandLineParser parser = new DefaultParser();
CommandLine line = parser.parse(opts, args, true); CommandLine line = parser.parse(opts, args, true);
processOptions(line, opts); processOptions(line, opts);
validateOptions(); validateOptions();
@ -290,81 +289,56 @@ public class TestTFileSeek {
  private Options buildOptions() {
    Option compress =
-        OptionBuilder.withLongOpt("compress").withArgName("[none|lzo|gz]")
-            .hasArg().withDescription("compression scheme").create('c');
+        Option.builder("c").longOpt("compress").argName("[none|lzo|gz]")
+            .hasArg().desc("compression scheme").build();
    Option fileSize =
-        OptionBuilder.withLongOpt("file-size").withArgName("size-in-MB")
-            .hasArg().withDescription("target size of the file (in MB).")
-            .create('s');
+        Option.builder("s").longOpt("file-size").argName("size-in-MB")
+            .hasArg().desc("target size of the file (in MB).").build();
    Option fsInputBufferSz =
-        OptionBuilder.withLongOpt("fs-input-buffer").withArgName("size")
-            .hasArg().withDescription(
-                "size of the file system input buffer (in bytes).").create(
-                'i');
+        Option.builder("i").longOpt("fs-input-buffer").argName("size")
+            .hasArg().desc("size of the file system input buffer (in bytes).").build();
    Option fsOutputBufferSize =
-        OptionBuilder.withLongOpt("fs-output-buffer").withArgName("size")
-            .hasArg().withDescription(
-                "size of the file system output buffer (in bytes).").create(
-                'o');
+        Option.builder("o").longOpt("fs-output-buffer").argName("size")
+            .hasArg().desc("size of the file system output buffer (in bytes).").build();
    Option keyLen =
-        OptionBuilder
-            .withLongOpt("key-length")
-            .withArgName("min,max")
-            .hasArg()
-            .withDescription(
-                "the length range of the key (in bytes)")
-            .create('k');
+        Option.builder("k").longOpt("key-length").argName("min,max")
+            .hasArg().desc("the length range of the key (in bytes)").build();
    Option valueLen =
-        OptionBuilder
-            .withLongOpt("value-length")
-            .withArgName("min,max")
-            .hasArg()
-            .withDescription(
-                "the length range of the value (in bytes)")
-            .create('v');
+        Option.builder("v").longOpt("value-length").argName("min,max")
+            .hasArg().desc("the length range of the value (in bytes)").build();
    Option blockSz =
-        OptionBuilder.withLongOpt("block").withArgName("size-in-KB").hasArg()
-            .withDescription("minimum block size (in KB)").create('b');
+        Option.builder("b").longOpt("block").argName("size-in-KB").hasArg()
+            .desc("minimum block size (in KB)").build();
    Option seed =
-        OptionBuilder.withLongOpt("seed").withArgName("long-int").hasArg()
-            .withDescription("specify the seed").create('S');
+        Option.builder("S").longOpt("seed").argName("long-int").hasArg()
+            .desc("specify the seed").build();
    Option operation =
-        OptionBuilder.withLongOpt("operation").withArgName("r|w|rw").hasArg()
-            .withDescription(
-                "action: seek-only, create-only, seek-after-create").create(
-                'x');
+        Option.builder("x").longOpt("operation").argName("r|w|rw").hasArg()
+            .desc("action: seek-only, create-only, seek-after-create").build();
    Option rootDir =
-        OptionBuilder.withLongOpt("root-dir").withArgName("path").hasArg()
-            .withDescription(
-                "specify root directory where files will be created.")
-            .create('r');
+        Option.builder("r").longOpt("root-dir").argName("path").hasArg()
+            .desc("specify root directory where files will be created.").build();
    Option file =
-        OptionBuilder.withLongOpt("file").withArgName("name").hasArg()
-            .withDescription("specify the file name to be created or read.")
-            .create('f');
+        Option.builder("f").longOpt("file").argName("name").hasArg()
+            .desc("specify the file name to be created or read.").build();
    Option seekCount =
-        OptionBuilder
-            .withLongOpt("seek")
-            .withArgName("count")
-            .hasArg()
-            .withDescription(
-                "specify how many seek operations we perform (requires -x r or -x rw.")
-            .create('n');
+        Option.builder("n").longOpt("seek").argName("count").hasArg()
+            .desc("specify how many seek operations we perform (requires -x r or -x rw.").build();
    Option help =
-        OptionBuilder.withLongOpt("help").hasArg(false).withDescription(
-            "show this screen").create("h");
+        Option.builder("h").longOpt("help").hasArg(false)
+            .desc("show this screen").build();
    return new Options().addOption(compress).addOption(fileSize).addOption(
        fsInputBufferSz).addOption(fsOutputBufferSize).addOption(keyLen)


@ -30,10 +30,9 @@ import org.junit.Test;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.DefaultParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -553,7 +552,7 @@ public class TestTFileSeqFileComparison {
try { try {
Options opts = buildOptions(); Options opts = buildOptions();
CommandLineParser parser = new GnuParser(); CommandLineParser parser = new DefaultParser();
CommandLine line = parser.parse(opts, args, true); CommandLine line = parser.parse(opts, args, true);
processOptions(line, opts); processOptions(line, opts);
validateOptions(); validateOptions();
@ -571,87 +570,70 @@ public class TestTFileSeqFileComparison {
  private Options buildOptions() {
    Option compress =
-        OptionBuilder.withLongOpt("compress").withArgName("[none|lzo|gz]")
-            .hasArg().withDescription("compression scheme").create('c');
+        Option.builder("c").longOpt("compress").argName("[none|lzo|gz]")
+            .hasArg().desc("compression scheme").build();
    Option ditSize =
-        OptionBuilder.withLongOpt("dict").withArgName("size").hasArg()
-            .withDescription("number of dictionary entries").create('d');
+        Option.builder("d").longOpt("dict").argName("size")
+            .hasArg().desc("number of dictionary entries").build();
    Option fileSize =
-        OptionBuilder.withLongOpt("file-size").withArgName("size-in-MB")
-            .hasArg().withDescription("target size of the file (in MB).")
-            .create('s');
+        Option.builder("s").longOpt("file-size").argName("size-in-MB")
+            .hasArg().desc("target size of the file (in MB).").build();
    Option format =
-        OptionBuilder.withLongOpt("format").withArgName("[tfile|seqfile]")
-            .hasArg().withDescription("choose TFile or SeqFile").create('f');
+        Option.builder("f").longOpt("format").argName("[tfile|seqfile]")
+            .hasArg().desc("choose TFile or SeqFile").build();
    Option fsInputBufferSz =
-        OptionBuilder.withLongOpt("fs-input-buffer").withArgName("size")
-            .hasArg().withDescription(
-                "size of the file system input buffer (in bytes).").create(
-                'i');
+        Option.builder("i").longOpt("fs-input-buffer").argName("size")
+            .hasArg().desc("size of the file system input buffer (in bytes).").build();
    Option fsOutputBufferSize =
-        OptionBuilder.withLongOpt("fs-output-buffer").withArgName("size")
-            .hasArg().withDescription(
-                "size of the file system output buffer (in bytes).").create(
-                'o');
+        Option.builder("o").longOpt("fs-output-buffer").argName("size")
+            .hasArg().desc("size of the file system output buffer (in bytes).").build();
    Option keyLen =
-        OptionBuilder
-            .withLongOpt("key-length")
-            .withArgName("length")
-            .hasArg()
-            .withDescription(
-                "base length of the key (in bytes), actual length varies in [base, 2*base)")
-            .create('k');
+        Option.builder("o").longOpt("key-length").argName("length")
+            .hasArg()
+            .desc("base length of the key (in bytes), actual length varies in [base, 2*base)")
+            .build();
    Option valueLen =
-        OptionBuilder
-            .withLongOpt("value-length")
-            .withArgName("length")
-            .hasArg()
-            .withDescription(
-                "base length of the value (in bytes), actual length varies in [base, 2*base)")
-            .create('v');
+        Option.builder("v").longOpt("key-length").argName("length")
+            .longOpt("value-length").argName("length").hasArg()
+            .desc("base length of the value (in bytes), actual length varies in [base, 2*base)")
+            .build();
    Option wordLen =
-        OptionBuilder.withLongOpt("word-length").withArgName("min,max")
-            .hasArg().withDescription(
-                "range of dictionary word length (in bytes)").create('w');
+        Option.builder("w").longOpt("word-length").argName("min,max")
+            .hasArg().desc("range of dictionary word length (in bytes)").build();
    Option blockSz =
-        OptionBuilder.withLongOpt("block").withArgName("size-in-KB").hasArg()
-            .withDescription("minimum block size (in KB)").create('b');
+        Option.builder("b").longOpt("block").argName("size-in-KB").hasArg()
+            .desc("minimum block size (in KB)").build();
    Option seed =
-        OptionBuilder.withLongOpt("seed").withArgName("long-int").hasArg()
-            .withDescription("specify the seed").create('S');
+        Option.builder("S").longOpt("seed").argName("long-int").hasArg()
+            .desc("specify the seed").build();
    Option operation =
-        OptionBuilder.withLongOpt("operation").withArgName("r|w|rw").hasArg()
-            .withDescription(
-                "action: read-only, create-only, read-after-create").create(
-                'x');
+        Option.builder("x").longOpt("operation").argName("r|w|rw").hasArg()
+            .desc("action: read-only, create-only, read-after-create").build();
    Option rootDir =
-        OptionBuilder.withLongOpt("root-dir").withArgName("path").hasArg()
-            .withDescription(
-                "specify root directory where files will be created.")
-            .create('r');
+        Option.builder("r").longOpt("root-dir").argName("path").hasArg()
+            .desc("specify root directory where files will be created.").build();
    Option help =
-        OptionBuilder.withLongOpt("help").hasArg(false).withDescription(
-            "show this screen").create("h");
+        Option.builder("h").longOpt("help").hasArg(false)
+            .desc("show this screen").build();
    return new Options().addOption(compress).addOption(ditSize).addOption(
        fileSize).addOption(format).addOption(fsInputBufferSz).addOption(
        fsOutputBufferSize).addOption(keyLen).addOption(wordLen).addOption(
        blockSz).addOption(rootDir).addOption(valueLen).addOption(operation)
        .addOption(help);
} }
private void processOptions(CommandLine line, Options opts) private void processOptions(CommandLine line, Options opts)


@ -23,7 +23,7 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -88,59 +88,78 @@ public class RPCCallBenchmark extends TestRpcBase implements Tool {
} }
} }
@SuppressWarnings("static-access")
private Options buildOptions() { private Options buildOptions() {
Options opts = new Options(); Options opts = new Options();
opts.addOption( opts.addOption(
OptionBuilder.withLongOpt("serverThreads").hasArg(true) Option.builder("s")
.withArgName("numthreads") .longOpt("serverThreads")
.withDescription("number of server threads (handlers) to run (or 0 to not run server)") .hasArg(true)
.create("s")); .argName("numthreads")
opts.addOption( .desc("number of server threads (handlers) to run (or 0 to not run server)")
OptionBuilder.withLongOpt("serverReaderThreads").hasArg(true) .build());
.withArgName("threads")
.withDescription("number of server reader threads to run")
.create("r"));
opts.addOption( opts.addOption(
OptionBuilder.withLongOpt("clientThreads").hasArg(true) Option.builder("r")
.withArgName("numthreads") .longOpt("serverReaderThreads")
.withDescription("number of client threads to run (or 0 to not run client)") .hasArg(true)
.create("c")); .argName("threads")
.desc("number of server reader threads to run")
.build());
opts.addOption( opts.addOption(
OptionBuilder.withLongOpt("messageSize").hasArg(true) Option.builder("c")
.withArgName("bytes") .longOpt("clientThreads")
.withDescription("size of call parameter in bytes") .hasArg(true)
.create("m")); .argName("numthreads")
.desc("number of client threads to run (or 0 to not run client)")
.build());
opts.addOption( opts.addOption(
OptionBuilder.withLongOpt("time").hasArg(true) Option.builder("m")
.withArgName("seconds") .longOpt("messageSize")
.withDescription("number of seconds to run clients for") .hasArg(true)
.create("t")); .argName("bytes")
opts.addOption( .desc("size of call parameter in bytes")
OptionBuilder.withLongOpt("port").hasArg(true) .build());
.withArgName("port")
.withDescription("port to listen or connect on")
.create("p"));
opts.addOption(
OptionBuilder.withLongOpt("host").hasArg(true)
.withArgName("addr")
.withDescription("host to listen or connect on")
.create('h'));
opts.addOption( opts.addOption(
OptionBuilder.withLongOpt("engine").hasArg(true) Option.builder("t")
.withArgName("protobuf") .longOpt("time")
.withDescription("engine to use") .hasArg(true)
.create('e')); .argName("seconds")
.desc("number of seconds to run clients for")
.build());
opts.addOption( opts.addOption(
OptionBuilder.withLongOpt("help").hasArg(false) Option.builder("p")
.withDescription("show this screen") .longOpt("port")
.create('?')); .hasArg(true)
.argName("port")
.desc("port to listen or connect on")
.build());
opts.addOption(
Option.builder("h")
.longOpt("host")
.hasArg(true)
.argName("addr")
.desc("host to listen or connect on")
.build());
opts.addOption(
Option.builder("e")
.longOpt("engine")
.hasArg(true)
.argName("protobuf")
.desc("engine to use")
.build());
opts.addOption(
Option.builder("?")
.longOpt("help")
.hasArg(false)
.desc("show this screen")
.build());
return opts; return opts;
} }
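
Dropping @SuppressWarnings("static-access") is a side benefit of the new API: each Option.builder() call returns an independent Option.Builder, so there is no shared static state to guard. A small sketch of that property (illustrative only, not from the patch):

    import org.apache.commons.cli.Option;

    public class IndependentBuildersSketch {
      public static void main(String[] args) {
        // Two builders in flight at once; with the old static OptionBuilder this
        // would have silently mixed state between the two options.
        Option.Builder first = Option.builder("s").longOpt("serverThreads").hasArg(true);
        Option.Builder second = Option.builder("c").longOpt("clientThreads").hasArg(true);

        Option s = first.desc("number of server threads").build();
        Option c = second.desc("number of client threads").build();

        System.out.println(s.getLongOpt() + " / " + c.getLongOpt()); // serverThreads / clientThreads
      }
    }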


@ -34,7 +34,6 @@ import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.math3.util.Pair; import org.apache.commons.math3.util.Pair;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -198,10 +197,11 @@ public class TestGenericOptionsParser {
@Test @Test
public void testCreateWithOptions() throws Exception { public void testCreateWithOptions() throws Exception {
// Create new option newOpt // Create new option newOpt
Option opt = OptionBuilder.withArgName("int")
Option opt = Option.builder("newOpt").argName("int")
.hasArg() .hasArg()
.withDescription("A new option") .desc("A new option")
.create("newOpt"); .build();
Options opts = new Options(); Options opts = new Options();
opts.addOption(opt); opts.addOption(opt);


@ -32,7 +32,6 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -246,29 +245,29 @@ public class RegistryCli extends Configured implements Tool, Closeable {
} }
public int bind(String[] args) { public int bind(String[] args) {
Option rest = OptionBuilder.withArgName("rest") Option rest = Option.builder("rest").argName("rest")
.hasArg() .hasArg()
.withDescription("rest Option") .desc("rest Option")
.create("rest"); .build();
Option webui = OptionBuilder.withArgName("webui") Option webui = Option.builder("webui").argName("webui")
.hasArg() .hasArg()
.withDescription("webui Option") .desc("webui Option")
.create("webui"); .build();
Option inet = OptionBuilder.withArgName("inet") Option inet = Option.builder("inet").argName("inet")
.withDescription("inet Option") .desc("inet Option")
.create("inet"); .build();
Option port = OptionBuilder.withArgName("port") Option port = Option.builder("p").argName("port")
.hasArg() .hasArg()
.withDescription("port to listen on [9999]") .desc("port to listen on [9999]")
.create("p"); .build();
Option host = OptionBuilder.withArgName("host") Option host = Option.builder("h").argName("host")
.hasArg() .hasArg()
.withDescription("host name") .desc("host name")
.create("h"); .build();
Option apiOpt = OptionBuilder.withArgName("api") Option apiOpt = Option.builder("api").argName("api")
.hasArg() .hasArg()
.withDescription("api") .desc("api")
.create("api"); .build();
Options inetOption = new Options(); Options inetOption = new Options();
inetOption.addOption(inet); inetOption.addOption(inet);
inetOption.addOption(port); inetOption.addOption(port);
@ -412,9 +411,9 @@ public class RegistryCli extends Configured implements Tool, Closeable {
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public int rm(String[] args) { public int rm(String[] args) {
Option recursive = OptionBuilder.withArgName("recursive") Option recursive = Option.builder("r").argName("recursive")
.withDescription("delete recursively") .desc("delete recursively")
.create("r"); .build();
Options rmOption = new Options(); Options rmOption = new Options();
rmOption.addOption(recursive); rmOption.addOption(recursive);


@ -724,12 +724,12 @@ public class Mover {
private static Options buildCliOptions() { private static Options buildCliOptions() {
Options opts = new Options(); Options opts = new Options();
Option file = OptionBuilder.withArgName("pathsFile").hasArg() Option file = Option.builder("f").argName("pathsFile").hasArg()
.withDescription("a local file containing files/dirs to migrate") .desc("a local file containing files/dirs to migrate")
.create("f"); .build();
Option paths = OptionBuilder.withArgName("paths").hasArgs() Option paths = Option.builder("p").argName("paths").hasArgs()
.withDescription("specify space separated files/dirs to migrate") .desc("specify space separated files/dirs to migrate")
.create("p"); .build();
OptionGroup group = new OptionGroup(); OptionGroup group = new OptionGroup();
group.addOption(file); group.addOption(file);
group.addOption(paths); group.addOption(paths);


@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.tools;
import org.apache.commons.cli.BasicParser; import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -279,34 +278,34 @@ public class DiskBalancerCLI extends Configured implements Tool {
*/ */
private void addPlanCommands(Options opt) { private void addPlanCommands(Options opt) {
Option plan = OptionBuilder.withLongOpt(PLAN) Option plan = Option.builder().longOpt(PLAN)
.withDescription("Hostname, IP address or UUID of datanode " + .desc("Hostname, IP address or UUID of datanode " +
"for which a plan is created.") "for which a plan is created.")
.hasArg() .hasArg()
.create(); .build();
getPlanOptions().addOption(plan); getPlanOptions().addOption(plan);
opt.addOption(plan); opt.addOption(plan);
Option outFile = OptionBuilder.withLongOpt(OUTFILE).hasArg() Option outFile = Option.builder().longOpt(OUTFILE).hasArg()
.withDescription( .desc(
"Local path of file to write output to, if not specified " "Local path of file to write output to, if not specified "
+ "defaults will be used.") + "defaults will be used.")
.create(); .build();
getPlanOptions().addOption(outFile); getPlanOptions().addOption(outFile);
opt.addOption(outFile); opt.addOption(outFile);
Option bandwidth = OptionBuilder.withLongOpt(BANDWIDTH).hasArg() Option bandwidth = Option.builder().longOpt(BANDWIDTH).hasArg()
.withDescription( .desc(
"Maximum disk bandwidth (MB/s) in integer to be consumed by " "Maximum disk bandwidth (MB/s) in integer to be consumed by "
+ "diskBalancer. e.g. 10 MB/s.") + "diskBalancer. e.g. 10 MB/s.")
.create(); .build();
getPlanOptions().addOption(bandwidth); getPlanOptions().addOption(bandwidth);
opt.addOption(bandwidth); opt.addOption(bandwidth);
Option threshold = OptionBuilder.withLongOpt(THRESHOLD) Option threshold = Option.builder().longOpt(THRESHOLD)
.hasArg() .hasArg()
.withDescription("Percentage of data skew that is tolerated before" .desc("Percentage of data skew that is tolerated before"
+ " disk balancer starts working. For example, if" + " disk balancer starts working. For example, if"
+ " total data on a 2 disk node is 100 GB then disk" + " total data on a 2 disk node is 100 GB then disk"
+ " balancer calculates the expected value on each disk," + " balancer calculates the expected value on each disk,"
@ -314,22 +313,22 @@ public class DiskBalancerCLI extends Configured implements Tool {
+ " on a single disk needs to be more than 60 GB" + " on a single disk needs to be more than 60 GB"
+ " (50 GB + 10% tolerance value) for Disk balancer to" + " (50 GB + 10% tolerance value) for Disk balancer to"
+ " balance the disks.") + " balance the disks.")
.create(); .build();
getPlanOptions().addOption(threshold); getPlanOptions().addOption(threshold);
opt.addOption(threshold); opt.addOption(threshold);
Option maxError = OptionBuilder.withLongOpt(MAXERROR) Option maxError = Option.builder().longOpt(MAXERROR)
.hasArg() .hasArg()
.withDescription("Describes how many errors " + .desc("Describes how many errors " +
"can be tolerated while copying between a pair of disks.") "can be tolerated while copying between a pair of disks.")
.create(); .build();
getPlanOptions().addOption(maxError); getPlanOptions().addOption(maxError);
opt.addOption(maxError); opt.addOption(maxError);
Option verbose = OptionBuilder.withLongOpt(VERBOSE) Option verbose = Option.builder().longOpt(VERBOSE)
.withDescription("Print out the summary of the plan on console") .desc("Print out the summary of the plan on console")
.create(); .build();
getPlanOptions().addOption(verbose); getPlanOptions().addOption(verbose);
opt.addOption(verbose); opt.addOption(verbose);
} }
@ -338,11 +337,11 @@ public class DiskBalancerCLI extends Configured implements Tool {
* Adds Help to the options. * Adds Help to the options.
*/ */
private void addHelpCommands(Options opt) { private void addHelpCommands(Options opt) {
Option help = OptionBuilder.withLongOpt(HELP) Option help = Option.builder().longOpt(HELP)
.hasOptionalArg() .optionalArg(true)
.withDescription("valid commands are plan | execute | query | cancel" + .desc("valid commands are plan | execute | query | cancel" +
" | report") " | report")
.create(); .build();
getHelpOptions().addOption(help); getHelpOptions().addOption(help);
opt.addOption(help); opt.addOption(help);
} }
@ -353,17 +352,17 @@ public class DiskBalancerCLI extends Configured implements Tool {
* @param opt Options * @param opt Options
*/ */
private void addExecuteCommands(Options opt) { private void addExecuteCommands(Options opt) {
Option execute = OptionBuilder.withLongOpt(EXECUTE) Option execute = Option.builder().longOpt(EXECUTE)
.hasArg() .hasArg()
.withDescription("Takes a plan file and " + .desc("Takes a plan file and " +
"submits it for execution by the datanode.") "submits it for execution by the datanode.")
.create(); .build();
getExecuteOptions().addOption(execute); getExecuteOptions().addOption(execute);
Option skipDateCheck = OptionBuilder.withLongOpt(SKIPDATECHECK) Option skipDateCheck = Option.builder().longOpt(SKIPDATECHECK)
.withDescription("skips the date check and force execute the plan") .desc("skips the date check and force execute the plan")
.create(); .build();
getExecuteOptions().addOption(skipDateCheck); getExecuteOptions().addOption(skipDateCheck);
opt.addOption(execute); opt.addOption(execute);
@ -376,20 +375,20 @@ public class DiskBalancerCLI extends Configured implements Tool {
* @param opt Options * @param opt Options
*/ */
private void addQueryCommands(Options opt) { private void addQueryCommands(Options opt) {
Option query = OptionBuilder.withLongOpt(QUERY) Option query = Option.builder().longOpt(QUERY)
.hasArg() .hasArg()
.withDescription("Queries the disk balancer " + .desc("Queries the disk balancer " +
"status of a given datanode.") "status of a given datanode.")
.create(); .build();
getQueryOptions().addOption(query); getQueryOptions().addOption(query);
opt.addOption(query); opt.addOption(query);
// Please note: Adding this only to Query options since -v is already // Please note: Adding this only to Query options since -v is already
// added to global table. // added to global table.
Option verbose = OptionBuilder.withLongOpt(VERBOSE) Option verbose = Option.builder().longOpt(VERBOSE)
.withDescription("Prints details of the plan that is being executed " + .desc("Prints details of the plan that is being executed " +
"on the node.") "on the node.")
.create(); .build();
getQueryOptions().addOption(verbose); getQueryOptions().addOption(verbose);
} }
@ -399,17 +398,17 @@ public class DiskBalancerCLI extends Configured implements Tool {
* @param opt Options * @param opt Options
*/ */
private void addCancelCommands(Options opt) { private void addCancelCommands(Options opt) {
Option cancel = OptionBuilder.withLongOpt(CANCEL) Option cancel = Option.builder().longOpt(CANCEL)
.hasArg() .hasArg()
.withDescription("Cancels a running plan using a plan file.") .desc("Cancels a running plan using a plan file.")
.create(); .build();
getCancelOptions().addOption(cancel); getCancelOptions().addOption(cancel);
opt.addOption(cancel); opt.addOption(cancel);
Option node = OptionBuilder.withLongOpt(NODE) Option node = Option.builder().longOpt(NODE)
.hasArg() .hasArg()
.withDescription("Cancels a running plan using a plan ID and hostName") .desc("Cancels a running plan using a plan ID and hostName")
.create(); .build();
getCancelOptions().addOption(node); getCancelOptions().addOption(node);
opt.addOption(node); opt.addOption(node);
@ -421,26 +420,26 @@ public class DiskBalancerCLI extends Configured implements Tool {
* @param opt Options * @param opt Options
*/ */
private void addReportCommands(Options opt) { private void addReportCommands(Options opt) {
Option report = OptionBuilder.withLongOpt(REPORT) Option report = Option.builder().longOpt(REPORT)
.withDescription("List nodes that will benefit from running " + .desc("List nodes that will benefit from running " +
"DiskBalancer.") "DiskBalancer.")
.create(); .build();
getReportOptions().addOption(report); getReportOptions().addOption(report);
opt.addOption(report); opt.addOption(report);
Option top = OptionBuilder.withLongOpt(TOP) Option top = Option.builder().longOpt(TOP)
.hasArg() .hasArg()
.withDescription("specify the number of nodes to be listed which has" + .desc("specify the number of nodes to be listed which has" +
" data imbalance.") " data imbalance.")
.create(); .build();
getReportOptions().addOption(top); getReportOptions().addOption(top);
opt.addOption(top); opt.addOption(top);
Option node = OptionBuilder.withLongOpt(NODE) Option node = Option.builder().longOpt(NODE)
.hasArg() .hasArg()
.withDescription("Datanode address, " + .desc("Datanode address, " +
"it can be DataNodeID, IP or hostname.") "it can be DataNodeID, IP or hostname.")
.create(); .build();
getReportOptions().addOption(node); getReportOptions().addOption(node);
opt.addOption(node); opt.addOption(node);
} }
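
All of the DiskBalancer options are long-only: Option.builder() with no short name plus longOpt(...), with the old hasOptionalArg() mapped to optionalArg(true). A compact standalone sketch of the two idioms (the option names reuse the ones above; parser choice and argument values are illustrative):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class LongOnlyOptionSketch {
      public static void main(String[] args) throws ParseException {
        // Long-only option: builder() without a short name requires longOpt(),
        // otherwise build() throws IllegalArgumentException.
        Option plan = Option.builder().longOpt("plan").hasArg()
            .desc("node for which a plan is created").build();
        // The old hasOptionalArg() becomes optionalArg(true) in this patch.
        Option help = Option.builder().longOpt("help").optionalArg(true)
            .desc("valid commands are plan | execute | query | cancel | report").build();

        Options opts = new Options().addOption(plan).addOption(help);
        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(opts, new String[] {"--plan", "datanode-1", "--help"});
        System.out.println(cmd.getOptionValue("plan")); // datanode-1
        System.out.println(cmd.hasOption("help"));      // true
      }
    }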


@ -39,7 +39,6 @@ import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
@ -247,39 +246,33 @@ public class JMXGet {
private static CommandLine parseArgs(Options opts, String... args) private static CommandLine parseArgs(Options opts, String... args)
throws IllegalArgumentException { throws IllegalArgumentException {
OptionBuilder.withArgName("NameNode|DataNode"); Option jmxService = Option.builder("service")
OptionBuilder.hasArg(); .argName("NameNode|DataNode").hasArg()
OptionBuilder.withDescription("specify jmx service (NameNode by default)"); .desc("specify jmx service (NameNode by default)").build();
Option jmx_service = OptionBuilder.create("service");
OptionBuilder.withArgName("mbean server"); Option jmxServer = Option.builder("server")
OptionBuilder.hasArg(); .argName("mbean server").hasArg()
OptionBuilder .desc("specify mbean server (localhost by default)").build();
.withDescription("specify mbean server (localhost by default)");
Option jmx_server = OptionBuilder.create("server");
OptionBuilder.withDescription("print help"); Option jmxHelp = Option.builder("help").desc("print help").build();
Option jmx_help = OptionBuilder.create("help");
OptionBuilder.withArgName("mbean server port"); Option jmxPort = Option.builder("port")
OptionBuilder.hasArg(); .argName("mbean server port")
OptionBuilder.withDescription("specify mbean server port, " .hasArg().desc("specify mbean server port, "
+ "if missing - it will try to connect to MBean Server in the same VM"); + "if missing - it will try to connect to MBean Server in the same VM").build();
Option jmx_port = OptionBuilder.create("port");
OptionBuilder.withArgName("VM's connector url"); Option jmxLocalVM = Option.builder("localVM")
OptionBuilder.hasArg(); .argName("VM's connector url").hasArg()
OptionBuilder.withDescription("connect to the VM on the same machine;" .desc("connect to the VM on the same machine;"
+ "\n use:\n jstat -J-Djstat.showUnsupported=true -snap <vmpid> | " + "\n use:\n jstat -J-Djstat.showUnsupported=true -snap <vmpid> | "
+ "grep sun.management.JMXConnectorServer.address\n " + "grep sun.management.JMXConnectorServer.address\n "
+ "to find the url"); + "to find the url").build();
Option jmx_localVM = OptionBuilder.create("localVM");
opts.addOption(jmx_server); opts.addOption(jmxServer);
opts.addOption(jmx_help); opts.addOption(jmxHelp);
opts.addOption(jmx_service); opts.addOption(jmxService);
opts.addOption(jmx_port); opts.addOption(jmxPort);
opts.addOption(jmx_localVM); opts.addOption(jmxLocalVM);
CommandLine commandLine = null; CommandLine commandLine = null;
CommandLineParser parser = new GnuParser(); CommandLineParser parser = new GnuParser();


@ -28,7 +28,7 @@ import org.apache.hadoop.util.ToolRunner;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
@ -100,15 +100,13 @@ public class OfflineEditsViewer extends Configured implements Tool {
// Build in/output file arguments, which are required, but there is no // Build in/output file arguments, which are required, but there is no
// addOption method that can specify this // addOption method that can specify this
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("outputFilename");
-    options.addOption(OptionBuilder.create("o"));
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("inputFilename");
-    options.addOption(OptionBuilder.create("i"));
+    Option optionOutputFileName =
+        Option.builder("o").required().hasArgs().longOpt("outputFilename").build();
+    options.addOption(optionOutputFileName);
+    Option optionInputFilename =
+        Option.builder("i").required().hasArgs().longOpt("inputFilename").build();
+    options.addOption(optionInputFilename);
options.addOption("p", "processor", true, ""); options.addOption("p", "processor", true, "");
options.addOption("v", "verbose", false, ""); options.addOption("v", "verbose", false, "");


@ -26,7 +26,7 @@ import java.nio.file.Paths;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
@ -177,15 +177,9 @@ public class OfflineImageViewer {
// Build in/output file arguments, which are required, but there is no // Build in/output file arguments, which are required, but there is no
// addOption method that can specify this // addOption method that can specify this
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("outputFile");
-    options.addOption(OptionBuilder.create("o"));
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("inputFile");
-    options.addOption(OptionBuilder.create("i"));
+    options.addOption(Option.builder("o").required().hasArgs().longOpt("outputFile").build());
+    options.addOption(Option.builder("i").required().hasArgs().longOpt("inputFile").build());
options.addOption("p", "processor", true, ""); options.addOption("p", "processor", true, "");
options.addOption("h", "help", false, ""); options.addOption("h", "help", false, "");


@ -24,7 +24,7 @@ import java.io.RandomAccessFile;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
@ -137,10 +137,8 @@ public class OfflineImageViewerPB {
// Build in/output file arguments, which are required, but there is no // Build in/output file arguments, which are required, but there is no
// addOption method that can specify this // addOption method that can specify this
-    OptionBuilder.isRequired();
-    OptionBuilder.hasArgs();
-    OptionBuilder.withLongOpt("inputFile");
-    options.addOption(OptionBuilder.create("i"));
+    Option optionInputFile = Option.builder("i").required().hasArgs().longOpt("inputFile").build();
+    options.addOption(optionInputFile);
options.addOption("o", "outputFile", true, ""); options.addOption("o", "outputFile", true, "");
options.addOption("p", "processor", true, ""); options.addOption("p", "processor", true, "");


@ -29,7 +29,7 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@ -88,23 +88,23 @@ public class MiniDFSClusterManager {
.addOption("httpport", true, "NameNode http port (default 0--we choose)") .addOption("httpport", true, "NameNode http port (default 0--we choose)")
.addOption("namenode", true, "URL of the namenode (default " .addOption("namenode", true, "URL of the namenode (default "
+ "is either the DFS cluster or a temporary dir)") + "is either the DFS cluster or a temporary dir)")
.addOption(OptionBuilder .addOption(Option.builder("D")
.hasArgs() .hasArgs()
.withArgName("property=value") .argName("property=value")
.withDescription("Options to pass into configuration object") .desc("Options to pass into configuration object")
.create("D")) .build())
.addOption(OptionBuilder .addOption(Option.builder("writeConfig")
.hasArg() .hasArg()
.withArgName("path") .argName("path")
.withDescription("Save configuration to this XML file.") .desc("Save configuration to this XML file.")
.create("writeConfig")) .build())
.addOption(OptionBuilder .addOption(Option.builder("writeDetails")
.hasArg() .hasArg()
.withArgName("path") .argName("path")
.withDescription("Write basic information to this JSON file.") .desc("Write basic information to this JSON file.")
.create("writeDetails")) .build())
.addOption(OptionBuilder.withDescription("Prints option help.") .addOption(Option.builder("help").desc("Prints option help.")
.create("help")); .build());
return options; return options;
} }
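
The -D option above uses hasArgs() (an unbounded value count) rather than hasArg(), so repeated occurrences accumulate on one Option. A standalone sketch of that behavior (parser choice and property values are illustrative, not from the patch):

    import org.apache.commons.cli.CommandLine;
    import org.apache.commons.cli.CommandLineParser;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class RepeatedPropertySketch {
      public static void main(String[] args) throws ParseException {
        Options options = new Options().addOption(
            Option.builder("D").hasArgs().argName("property=value")
                .desc("Options to pass into configuration object").build());

        CommandLineParser parser = new DefaultParser();
        CommandLine cmd = parser.parse(options,
            new String[] {"-D", "dfs.blocksize=134217728", "-D", "dfs.replication=1"});

        for (String kv : cmd.getOptionValues("D")) {
          System.out.println(kv); // dfs.blocksize=134217728, then dfs.replication=1
        }
      }
    }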


@ -30,7 +30,6 @@ import java.util.StringTokenizer;
import org.apache.commons.cli.BasicParser; import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.Parser; import org.apache.commons.cli.Parser;
@ -346,12 +345,14 @@ public class Submitter extends Configured implements Tool {
void addOption(String longName, boolean required, String description, void addOption(String longName, boolean required, String description,
String paramName) { String paramName) {
-    Option option = OptionBuilder.withArgName(paramName).hasArgs(1).withDescription(description).isRequired(required).create(longName);
+    Option option = Option.builder(longName).argName(paramName)
+        .hasArg().desc(description).required(required).build();
    options.addOption(option);
  }
  void addArgument(String name, boolean required, String description) {
-    Option option = OptionBuilder.withArgName(name).hasArgs(1).withDescription(description).isRequired(required).create();
+    Option option = Option.builder().argName(name)
+        .hasArg().desc(description).required(required).build();
options.addOption(option); options.addOption(option);
} }


@ -31,7 +31,7 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.GnuParser; import org.apache.commons.cli.GnuParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.ParseException; import org.apache.commons.cli.ParseException;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
@ -105,18 +105,17 @@ public class MiniHadoopClusterManager {
.addOption("jhsport", true, .addOption("jhsport", true,
"JobHistoryServer port (default 0--we choose)") "JobHistoryServer port (default 0--we choose)")
.addOption( .addOption(
OptionBuilder.hasArgs().withArgName("property=value") Option.builder("D").hasArgs().argName("property=value")
.withDescription("Options to pass into configuration object") .desc("Options to pass into configuration object")
.create("D")) .build())
.addOption( .addOption(
OptionBuilder.hasArg().withArgName("path").withDescription( Option.builder("writeConfig").hasArg().argName("path").desc(
"Save configuration to this XML file.").create("writeConfig")) "Save configuration to this XML file.").build())
.addOption( .addOption(
OptionBuilder.hasArg().withArgName("path").withDescription( Option.builder("writeDetails").argName("path").desc(
"Write basic information to this JSON file.").create( "Write basic information to this JSON file.").build())
"writeDetails"))
.addOption( .addOption(
OptionBuilder.withDescription("Prints option help.").create("help")); Option.builder("help").desc("Prints option help.").build());
return options; return options;
} }


@ -20,7 +20,7 @@ package org.apache.hadoop.mapred.uploader;
import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.compress.archivers.ArchiveEntry; import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream;
@ -484,53 +484,53 @@ public class FrameworkUploader implements Runnable {
@VisibleForTesting @VisibleForTesting
boolean parseArguments(String[] args) throws IOException { boolean parseArguments(String[] args) throws IOException {
Options opts = new Options(); Options opts = new Options();
opts.addOption(OptionBuilder.create("h")); opts.addOption(Option.builder("h").build());
opts.addOption(OptionBuilder.create("help")); opts.addOption(Option.builder("help").build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("input")
.withDescription("Input class path. Defaults to the default classpath.") .desc("Input class path. Defaults to the default classpath.")
.hasArg().create("input")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("whitelist")
.withDescription( .desc(
"Regex specifying the full path of jars to include in the" + "Regex specifying the full path of jars to include in the" +
" framework tarball. Default is a hardcoded set of jars" + " framework tarball. Default is a hardcoded set of jars" +
" considered necessary to include") " considered necessary to include")
.hasArg().create("whitelist")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("blacklist")
.withDescription( .desc(
"Regex specifying the full path of jars to exclude in the" + "Regex specifying the full path of jars to exclude in the" +
" framework tarball. Default is a hardcoded set of jars" + " framework tarball. Default is a hardcoded set of jars" +
" considered unnecessary to include") " considered unnecessary to include")
.hasArg().create("blacklist")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("fs")
.withDescription( .desc(
"Target file system to upload to." + "Target file system to upload to." +
" Example: hdfs://foo.com:8020") " Example: hdfs://foo.com:8020")
.hasArg().create("fs")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("target")
.withDescription( .desc(
"Target file to upload to with a reference name." + "Target file to upload to with a reference name." +
" Example: /usr/mr-framework.tar.gz#mr-framework") " Example: /usr/mr-framework.tar.gz#mr-framework")
.hasArg().create("target")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("initialReplication")
.withDescription( .desc(
"Desired initial replication count. Default 3.") "Desired initial replication count. Default 3.")
.hasArg().create("initialReplication")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("finalReplication")
.withDescription( .desc(
"Desired final replication count. Default 10.") "Desired final replication count. Default 10.")
.hasArg().create("finalReplication")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("acceptableReplication")
.withDescription( .desc(
"Desired acceptable replication count. Default 9.") "Desired acceptable replication count. Default 9.")
.hasArg().create("acceptableReplication")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("timeout")
.withDescription( .desc(
"Desired timeout for the acceptable" + "Desired timeout for the acceptable" +
" replication in seconds. Default 10") " replication in seconds. Default 10")
.hasArg().create("timeout")); .hasArg().build());
opts.addOption(OptionBuilder opts.addOption(Option.builder("nosymlink")
.withDescription("Ignore symlinks into the same directory") .desc("Ignore symlinks into the same directory")
.create("nosymlink")); .build());
GenericOptionsParser parser = new GenericOptionsParser(opts, args); GenericOptionsParser parser = new GenericOptionsParser(opts, args);
if (parser.getCommandLine().hasOption("help") || if (parser.getCommandLine().hasOption("help") ||
parser.getCommandLine().hasOption("h")) { parser.getCommandLine().hasOption("h")) {


@ -113,7 +113,7 @@
<!-- Apache Commons dependencies --> <!-- Apache Commons dependencies -->
<commons-beanutils.version>1.9.4</commons-beanutils.version> <commons-beanutils.version>1.9.4</commons-beanutils.version>
<commons-cli.version>1.2</commons-cli.version> <commons-cli.version>1.5.0</commons-cli.version>
<commons-codec.version>1.15</commons-codec.version> <commons-codec.version>1.15</commons-codec.version>
<commons-collections.version>3.2.2</commons-collections.version> <commons-collections.version>3.2.2</commons-collections.version>
<commons-compress.version>1.21</commons-compress.version> <commons-compress.version>1.21</commons-compress.version>


@@ -21,7 +21,7 @@ import java.net.URI;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.OptionBuilder; import org.apache.commons.cli.Option;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
@@ -65,24 +65,25 @@ public class GenerateBlockImagesDriver extends Configured implements Tool {
public int run(String[] args) throws Exception { public int run(String[] args) throws Exception {
Options options = new Options(); Options options = new Options();
options.addOption("h", "help", false, "Shows this message"); options.addOption("h", "help", false, "Shows this message");
options.addOption(OptionBuilder.withArgName("Input path of the XML fsImage") options.addOption(Option.builder(FSIMAGE_INPUT_PATH_ARG)
.hasArg().isRequired(true) .argName("Input path of the XML fsImage")
.withDescription("Input path to the Hadoop fsImage XML file (required)") .hasArg().required(true)
.create(FSIMAGE_INPUT_PATH_ARG)); .desc("Input path to the Hadoop fsImage XML file (required)")
options.addOption(OptionBuilder.withArgName("BlockImage output directory") .build());
.hasArg().isRequired(true) options.addOption(Option.builder(BLOCK_IMAGE_OUTPUT_ARG).argName("BlockImage output directory")
.withDescription("Directory where the generated files containing the " .hasArg().required(true)
.desc("Directory where the generated files containing the "
+ "block listing for each DataNode should be stored (required)") + "block listing for each DataNode should be stored (required)")
.create(BLOCK_IMAGE_OUTPUT_ARG)); .build());
options.addOption(OptionBuilder.withArgName("Number of reducers").hasArg() options.addOption(Option.builder(NUM_REDUCERS_ARG).argName("Number of reducers").hasArg()
.isRequired(false) .required(false)
.withDescription( .desc(
"Number of reducers for this job (defaults to number of datanodes)") "Number of reducers for this job (defaults to number of datanodes)")
.create(NUM_REDUCERS_ARG)); .build());
options.addOption(OptionBuilder.withArgName("Number of datanodes").hasArg() options.addOption(Option.builder(NUM_DATANODES_ARG).argName("Number of datanodes").hasArg()
.isRequired(true) .required(true)
.withDescription("Number of DataNodes to create blocks for (required)") .desc("Number of DataNodes to create blocks for (required)")
.create(NUM_DATANODES_ARG)); .build());
CommandLineParser parser = new PosixParser(); CommandLineParser parser = new PosixParser();
CommandLine cli = parser.parse(options, args); CommandLine cli = parser.parse(options, args);
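
The isRequired(true) calls above become required(true) on the builder; what happens at parse time is unchanged. A hedged sketch of that behaviour, with an invented option name and commons-cli's DefaultParser standing in for the PosixParser this file keeps:

    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.MissingOptionException;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.Options;
    import org.apache.commons.cli.ParseException;

    public class RequiredOptionSketch {
      public static void main(String[] args) {
        Options options = new Options();
        options.addOption(Option.builder("fsimage")      // illustrative name
            .argName("Input path of the XML fsImage")
            .hasArg()
            .required(true)                               // 1.5 spelling of isRequired(true)
            .desc("Input path to the Hadoop fsImage XML file (required)")
            .build());
        try {
          new DefaultParser().parse(options, args);
          System.out.println("All required options supplied.");
        } catch (MissingOptionException e) {
          // Raised when a required option such as -fsimage is absent.
          System.err.println("Missing options: " + e.getMissingOptions());
        } catch (ParseException e) {
          System.err.println("Parse failed: " + e.getMessage());
        }
      }
    }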


@@ -26,7 +26,6 @@ import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.HelpFormatter; import org.apache.commons.cli.HelpFormatter;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.OptionGroup; import org.apache.commons.cli.OptionGroup;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.apache.commons.cli.PosixParser; import org.apache.commons.cli.PosixParser;
@@ -64,28 +63,28 @@ public class WorkloadDriver extends Configured implements Tool {
+ " argument to show help for a specific mapper class."); + " argument to show help for a specific mapper class.");
Options options = new Options(); Options options = new Options();
options.addOption(helpOption); options.addOption(helpOption);
options.addOption(OptionBuilder.withArgName("NN URI").hasArg() options.addOption(Option.builder(NN_URI).argName("NN URI").hasArg()
.withDescription("URI of the NameNode under test").isRequired() .desc("URI of the NameNode under test").required()
.create(NN_URI)); .build());
OptionGroup startTimeOptions = new OptionGroup(); OptionGroup startTimeOptions = new OptionGroup();
startTimeOptions.addOption(OptionBuilder.withArgName("Start Timestamp") startTimeOptions.addOption(Option.builder(START_TIMESTAMP_MS).argName("Start Timestamp")
.hasArg().withDescription("Mapper start UTC timestamp in ms") .hasArg().desc("Mapper start UTC timestamp in ms")
.create(START_TIMESTAMP_MS)); .build());
startTimeOptions startTimeOptions
.addOption(OptionBuilder.withArgName("Start Time Offset").hasArg() .addOption(Option.builder(START_TIME_OFFSET).argName("Start Time Offset").hasArg()
.withDescription("Mapper start time as an offset from current " .desc("Mapper start time as an offset from current "
+ "time. Human-readable formats accepted, e.g. 10m (default " + "time. Human-readable formats accepted, e.g. 10m (default "
+ START_TIME_OFFSET_DEFAULT + ").") + START_TIME_OFFSET_DEFAULT + ").")
.create(START_TIME_OFFSET)); .build());
options.addOptionGroup(startTimeOptions); options.addOptionGroup(startTimeOptions);
Option mapperClassOption = OptionBuilder.withArgName("Mapper ClassName") Option mapperClassOption = Option.builder(MAPPER_CLASS_NAME).argName("Mapper ClassName")
.hasArg() .hasArg()
.withDescription("Class name of the mapper; must be a WorkloadMapper " .desc("Class name of the mapper; must be a WorkloadMapper "
+ "subclass. Mappers supported currently: \n" + "subclass. Mappers supported currently: \n"
+ "1. AuditReplayMapper \n" + "1. AuditReplayMapper \n"
+ "2. CreateFileMapper \n" + "2. CreateFileMapper \n"
+ "Fully specified class names are also supported.") + "Fully specified class names are also supported.")
.isRequired().create(MAPPER_CLASS_NAME); .required().build();
options.addOption(mapperClassOption); options.addOption(mapperClassOption);
Options helpOptions = new Options(); Options helpOptions = new Options();
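
The two start-time options above stay mutually exclusive because they live in an OptionGroup; only the way each Option is constructed changes. A small sketch of that combination under the 1.5 builder, with invented literal option names standing in for the START_TIMESTAMP_MS / START_TIME_OFFSET constants:

    import org.apache.commons.cli.AlreadySelectedException;
    import org.apache.commons.cli.DefaultParser;
    import org.apache.commons.cli.Option;
    import org.apache.commons.cli.OptionGroup;
    import org.apache.commons.cli.Options;

    public class OptionGroupSketch {
      public static void main(String[] args) throws Exception {
        OptionGroup startTime = new OptionGroup();
        startTime.addOption(Option.builder("startTimestampMs")
            .argName("Start Timestamp").hasArg()
            .desc("Mapper start UTC timestamp in ms").build());
        startTime.addOption(Option.builder("startTimeOffset")
            .argName("Start Time Offset").hasArg()
            .desc("Mapper start time as an offset from the current time").build());

        Options options = new Options();
        options.addOptionGroup(startTime);
        try {
          new DefaultParser().parse(options, args);
          System.out.println("Selected: " + startTime.getSelected());
        } catch (AlreadySelectedException e) {
          // Supplying both options from the group fails here.
          System.err.println(e.getMessage());
        }
      }
    }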


@@ -34,7 +34,6 @@ import org.apache.commons.cli.BasicParser;
import org.apache.commons.cli.CommandLine; import org.apache.commons.cli.CommandLine;
import org.apache.commons.cli.CommandLineParser; import org.apache.commons.cli.CommandLineParser;
import org.apache.commons.cli.Option; import org.apache.commons.cli.Option;
import org.apache.commons.cli.OptionBuilder;
import org.apache.commons.cli.Options; import org.apache.commons.cli.Options;
import org.slf4j.Logger; import org.slf4j.Logger;
import org.slf4j.LoggerFactory; import org.slf4j.LoggerFactory;
@@ -382,16 +381,17 @@ public class StreamJob implements Tool {
private Option createOption(String name, String desc, private Option createOption(String name, String desc,
String argName, int max, boolean required){ String argName, int max, boolean required){
return OptionBuilder return Option.builder(name)
.withArgName(argName) .argName(argName)
.hasArgs(max) .hasArgs()
.withDescription(desc) .numberOfArgs(max)
.isRequired(required) .desc(desc)
.create(name); .required(required)
.build();
} }
private Option createBoolOption(String name, String desc){ private Option createBoolOption(String name, String desc){
return OptionBuilder.withDescription(desc).create(name); return Option.builder(name).desc(desc).build();
} }
private void validate(final Path path) throws IOException { private void validate(final Path path) throws IOException {
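
One detail of the rewritten helper above: the builder chain calls hasArgs() and then numberOfArgs(max). In commons-cli 1.5 the later numberOfArgs(max) call sets the final argument count, so the pair behaves like the old hasArgs(max). A hedged sketch of the two helper shapes with that redundancy dropped and invented option names; it mirrors the intent of this hunk rather than reproducing StreamJob itself:

    import org.apache.commons.cli.Option;

    public final class StreamOptionSketch {

      // Multi-valued option: numberOfArgs(max) caps the value count,
      // the role hasArgs(max) played on the old OptionBuilder.
      static Option createOption(String name, String desc, String argName,
          int max, boolean required) {
        return Option.builder(name)
            .argName(argName)
            .numberOfArgs(max)
            .desc(desc)
            .required(required)
            .build();
      }

      // Flag-style option: no argument-related calls at all.
      static Option createBoolOption(String name, String desc) {
        return Option.builder(name).desc(desc).build();
      }

      public static void main(String[] args) {
        Option files = createOption("files", "Comma-separated file list", "paths", 1, false);
        System.out.println(files.getOpt() + " accepts " + files.getArgs() + " argument(s)");
      }
    }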