From 2ab67dce1103f9ec2c1d0630217a0501a22009ce Mon Sep 17 00:00:00 2001
From: Todd Lipcon
Date: Sat, 7 Apr 2012 00:15:30 +0000
Subject: [PATCH] HDFS-3226. Allow GetConf tool to print arbitrary keys. Contributed by Todd Lipcon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1310648 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt    |  2 +
 .../org/apache/hadoop/hdfs/tools/GetConf.java  | 71 ++++++++++++++-----
 .../apache/hadoop/hdfs/tools/TestGetConf.java  | 35 +++++++--
 3 files changed, 88 insertions(+), 20 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 78dd63fbae6..dc3a6bbef0a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -333,6 +333,8 @@ Release 2.0.0 - UNRELEASED
     HDFS-3050. rework OEV to share more code with the NameNode.
     (Colin Patrick McCabe via eli)
 
+    HDFS-3226. Allow GetConf tool to print arbitrary keys (todd)
+
   OPTIMIZATIONS
 
     HDFS-3024. Improve performance of stringification in addStoredBlock (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
index e3a67edebc9..2546873e1e3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
@@ -21,10 +21,12 @@
 import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
+import java.util.Arrays;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hdfs.DFSUtil;
@@ -70,7 +72,8 @@ enum Command {
     EXCLUDE_FILE("-excludeFile",
         "gets the exclude file path that defines the datanodes " +
         "that need to decommissioned."),
-    NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses");
+    NNRPCADDRESSES("-nnRpcAddresses", "gets the namenode rpc addresses"),
+    CONFKEY("-confKey [key]", "gets a specific key from the configuration");
 
     private static Map<String, CommandHandler> map;
     static {
@@ -87,6 +90,8 @@ enum Command {
           new CommandHandler("DFSConfigKeys.DFS_HOSTS_EXCLUDE"));
       map.put(NNRPCADDRESSES.getName().toLowerCase(),
           new NNRpcAddressesCommandHandler());
+      map.put(CONFKEY.getName().toLowerCase(),
+          new PrintConfKeyCommandHandler());
     }
 
     private final String cmd;
@@ -98,6 +103,10 @@ enum Command {
     }
 
     public String getName() {
+      return cmd.split(" ")[0];
+    }
+
+    public String getUsage() {
       return cmd;
     }
 
@@ -105,8 +114,8 @@ public String getDescription() {
       return description;
     }
 
-    public static CommandHandler getHandler(String name) {
-      return map.get(name.toLowerCase());
+    public static CommandHandler getHandler(String cmd) {
+      return map.get(cmd.toLowerCase());
     }
   }
 
@@ -118,7 +127,7 @@ public static CommandHandler getHandler(String name) {
     StringBuilder usage = new StringBuilder(DESCRIPTION);
     usage.append("\nhadoop getconf \n");
     for (Command cmd : Command.values()) {
-      usage.append("\t[" + cmd.getName() + "]\t\t\t" + cmd.getDescription()
+      usage.append("\t[" + cmd.getUsage() + "]\t\t\t" + cmd.getDescription()
           + "\n");
     }
     USAGE = usage.toString();
@@ -128,7 +137,7 @@ public static CommandHandler getHandler(String name) {
    * Handler to return value for key corresponding to the {@link Command}
    */
   static class CommandHandler {
-    final String key; // Configuration key to lookup
+    String key; // Configuration key to lookup
 
     CommandHandler() {
       this(null);
@@ -138,18 +147,30 @@ static class CommandHandler {
       this.key = key;
     }
 
-    final int doWork(GetConf tool) {
+    final int doWork(GetConf tool, String[] args) {
       try {
-        return doWorkInternal(tool);
+        checkArgs(args);
+
+        return doWorkInternal(tool, args);
       } catch (Exception e) {
         tool.printError(e.getMessage());
       }
       return -1;
     }
+
+    protected void checkArgs(String args[]) {
+      if (args.length > 0) {
+        throw new HadoopIllegalArgumentException(
+            "Did not expect argument: " + args[0]);
+      }
+    }
+
 
-    /** Method to be overridden by sub classes for specific behavior */
-    int doWorkInternal(GetConf tool) throws Exception {
-      String value = tool.getConf().get(key);
+    /** Method to be overridden by sub classes for specific behavior
+     * @param args */
+    int doWorkInternal(GetConf tool, String[] args) throws Exception {
+
+      String value = tool.getConf().getTrimmed(key);
       if (value != null) {
         tool.printOut(value);
         return 0;
@@ -164,7 +185,7 @@ int doWorkInternal(GetConf tool) throws Exception {
    */
   static class NameNodesCommandHandler extends CommandHandler {
     @Override
-    int doWorkInternal(GetConf tool) throws IOException {
+    int doWorkInternal(GetConf tool, String []args) throws IOException {
       tool.printMap(DFSUtil.getNNServiceRpcAddresses(tool.getConf()));
       return 0;
     }
@@ -175,7 +196,7 @@ int doWorkInternal(GetConf tool) throws IOException {
    */
   static class BackupNodesCommandHandler extends CommandHandler {
     @Override
-    public int doWorkInternal(GetConf tool) throws IOException {
+    public int doWorkInternal(GetConf tool, String []args) throws IOException {
       tool.printMap(DFSUtil.getBackupNodeAddresses(tool.getConf()));
       return 0;
     }
@@ -186,7 +207,7 @@ public int doWorkInternal(GetConf tool) throws IOException {
    */
   static class SecondaryNameNodesCommandHandler extends CommandHandler {
     @Override
-    public int doWorkInternal(GetConf tool) throws IOException {
+    public int doWorkInternal(GetConf tool, String []args) throws IOException {
       tool.printMap(DFSUtil.getSecondaryNameNodeAddresses(tool.getConf()));
       return 0;
     }
@@ -199,7 +220,7 @@ public int doWorkInternal(GetConf tool) throws IOException {
    */
   static class NNRpcAddressesCommandHandler extends CommandHandler {
     @Override
-    public int doWorkInternal(GetConf tool) throws IOException {
+    public int doWorkInternal(GetConf tool, String []args) throws IOException {
       Configuration config = tool.getConf();
       List<ConfiguredNNAddress> cnnlist = DFSUtil.flattenAddressMap(
           DFSUtil.getNNServiceRpcAddresses(config));
@@ -215,6 +236,23 @@ public int doWorkInternal(GetConf tool) throws IOException {
     }
   }
 
+  static class PrintConfKeyCommandHandler extends CommandHandler {
+    @Override
+    protected void checkArgs(String[] args) {
+      if (args.length != 1) {
+        throw new HadoopIllegalArgumentException(
+            "usage: " + Command.CONFKEY.getUsage());
+      }
+    }
+
+    @Override
+    int doWorkInternal(GetConf tool, String[] args) throws Exception {
+      this.key = args[0];
+      System.err.println("key: " + key);
+      return super.doWorkInternal(tool, args);
+    }
+  }
+
   private final PrintStream out; // Stream for printing command output
   private final PrintStream err; // Stream for printing error
 
@@ -260,10 +298,11 @@ private void printUsage() {
    * @return return status of the command
    */
   private int doWork(String[] args) {
-    if (args.length == 1) {
+    if (args.length >= 1) {
       CommandHandler handler = Command.getHandler(args[0]);
       if (handler != null) {
-        return handler.doWork(this);
+        return handler.doWork(this,
+            Arrays.copyOfRange(args, 1, args.length));
       }
     }
     printUsage();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
index 97be2b843d6..93de1d2e5c4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
@@ -42,6 +42,8 @@
 import org.apache.hadoop.util.ToolRunner;
 import org.junit.Test;
 
+import com.google.common.base.Joiner;
+
 /**
  * Test for {@link GetConf}
  */
@@ -117,7 +119,12 @@ private String runTool(HdfsConfiguration conf, String[] args, boolean success)
     PrintStream out = new PrintStream(o, true);
     try {
       int ret = ToolRunner.run(new GetConf(conf, out, out), args);
-      assertEquals(success, ret == 0);
+      out.flush();
+      System.err.println("Output: " + o.toString());
+      assertEquals("Expected " + (success?"success":"failure") +
+          " for args: " + Joiner.on(" ").join(args) + "\n" +
+          "Output: " + o.toString(),
+          success, ret == 0);
       return o.toString();
     } finally {
       o.close();
@@ -222,7 +229,9 @@ public void testEmptyConf() throws Exception {
     getAddressListFromTool(TestType.SECONDARY, conf, false);
     getAddressListFromTool(TestType.NNRPCADDRESSES, conf, false);
     for (Command cmd : Command.values()) {
-      CommandHandler handler = Command.getHandler(cmd.getName());
+      String arg = cmd.getName();
+      CommandHandler handler = Command.getHandler(arg);
+      assertNotNull("missing handler: " + cmd, handler);
       if (handler.key != null) {
         // First test with configuration missing the required key
         String[] args = {handler.key};
@@ -319,18 +328,36 @@ public void testFederation() throws Exception {
     verifyAddresses(conf, TestType.SECONDARY, false, secondaryAddresses);
     verifyAddresses(conf, TestType.NNRPCADDRESSES, true, nnAddresses);
   }
+
+  @Test
+  public void testGetSpecificKey() throws Exception {
+    HdfsConfiguration conf = new HdfsConfiguration();
+    conf.set("mykey", " myval ");
+    String[] args = {"-confKey", "mykey"};
+    assertTrue(runTool(conf, args, true).equals("myval\n"));
+  }
+
+  @Test
+  public void testExtraArgsThrowsError() throws Exception {
+    HdfsConfiguration conf = new HdfsConfiguration();
+    conf.set("mykey", "myval");
+    String[] args = {"-namenodes", "unexpected-arg"};
+    assertTrue(runTool(conf, args, false).contains(
+        "Did not expect argument: unexpected-arg"));
+  }
 
   /**
    * Tests commands other than {@link Command#NAMENODE}, {@link Command#BACKUP},
   * {@link Command#SECONDARY} and {@link Command#NNRPCADDRESSES}
   */
+  @Test
   public void testTool() throws Exception {
     HdfsConfiguration conf = new HdfsConfiguration(false);
     for (Command cmd : Command.values()) {
       CommandHandler handler = Command.getHandler(cmd.getName());
-      if (handler.key != null) {
+      if (handler.key != null && !"-confKey".equals(cmd.getName())) {
         // Add the key to the conf and ensure tool returns the right value
-        String[] args = {handler.key};
+        String[] args = {cmd.getName()};
         conf.set(handler.key, "value");
         assertTrue(runTool(conf, args, true).contains("value"));
       }
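
Not part of the commit above: a minimal sketch of how the new -confKey command can be driven programmatically, mirroring the way TestGetConf#runTool invokes the tool through ToolRunner in the patch. On the command line the equivalent call would be "hadoop getconf -confKey mykey". The class name GetConfKeyExample is illustrative only, and it is placed in the org.apache.hadoop.hdfs.tools package on the assumption that the GetConf constructor taking explicit streams is not public, since the test relies on same-package access.

package org.apache.hadoop.hdfs.tools;

import java.io.ByteArrayOutputStream;
import java.io.PrintStream;

import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.util.ToolRunner;

public class GetConfKeyExample {
  public static void main(String[] argv) throws Exception {
    // Set a key with surrounding whitespace, as testGetSpecificKey does;
    // GetConf now reads it with getTrimmed(), so "myval" is printed.
    HdfsConfiguration conf = new HdfsConfiguration();
    conf.set("mykey", " myval ");

    // Capture the tool's output instead of writing to System.out,
    // the same way TestGetConf#runTool does.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    PrintStream out = new PrintStream(bytes, true);

    // Equivalent of: hadoop getconf -confKey mykey
    String[] args = {"-confKey", "mykey"};
    int ret = ToolRunner.run(new GetConf(conf, out, out), args);
    out.flush();

    System.out.println("exit code: " + ret);   // 0 when the key is present
    System.out.print("tool output: " + bytes.toString());  // "myval" plus newline
  }
}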