diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7597f49129b..8822e366655 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1019,6 +1019,9 @@ Release 2.3.0 - UNRELEASED
     HDFS-5661. Browsing FileSystem via web ui, should use datanode's fqdn
     instead of ip address. (Benoy Antony via jing9)
 
+    HDFS-5582. hdfs getconf -excludeFile or -includeFile always failed (sathish
+    via cmccabe)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
index 778ac59ee25..92a3864a675 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/GetConf.java
@@ -29,6 +29,7 @@ import java.util.Map;
 import org.apache.hadoop.HadoopIllegalArgumentException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
@@ -85,9 +86,9 @@ public class GetConf extends Configured implements Tool {
     map.put(BACKUP.getName().toLowerCase(),
         new BackupNodesCommandHandler());
     map.put(INCLUDE_FILE.getName().toLowerCase(),
-        new CommandHandler("DFSConfigKeys.DFS_HOSTS"));
+        new CommandHandler(DFSConfigKeys.DFS_HOSTS));
     map.put(EXCLUDE_FILE.getName().toLowerCase(),
-        new CommandHandler("DFSConfigKeys.DFS_HOSTS_EXCLUDE"));
+        new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
     map.put(NNRPCADDRESSES.getName().toLowerCase(),
         new NNRpcAddressesCommandHandler());
     map.put(CONFKEY.getName().toLowerCase(),
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
index 7a17cea2d65..80b176f4bd5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java
@@ -33,10 +33,15 @@ import java.io.PrintStream;
 import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
 import java.util.StringTokenizer;
 
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
@@ -55,7 +60,7 @@ public class TestGetConf {
   enum TestType {
     NAMENODE, BACKUP, SECONDARY, NNRPCADDRESSES
   }
-  
+  FileSystem localFileSys;
   /** Setup federation nameServiceIds in the configuration */
   private void setupNameServices(HdfsConfiguration conf, int nameServiceIdCount) {
     StringBuilder nsList = new StringBuilder();
@@ -379,4 +384,70 @@ public class TestGetConf {
       }
     }
   }
+  @Test
+  public void TestGetConfExcludeCommand() throws Exception {
+    HdfsConfiguration conf = new HdfsConfiguration();
+    // Set up the hosts/exclude files.
+    localFileSys = FileSystem.getLocal(conf);
+    Path workingDir = localFileSys.getWorkingDirectory();
+    Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
+    Path hostsFile = new Path(dir, "hosts");
+    Path excludeFile = new Path(dir, "exclude");
+
+    // Setup conf
+    conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
+    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
+    writeConfigFile(hostsFile, null);
+    writeConfigFile(excludeFile, null);
+    String[] args = {"-excludeFile"};
+    String ret = runTool(conf, args, true);
+    assertEquals(excludeFile.toUri().getPath(), ret.trim());
+    cleanupFile(localFileSys, excludeFile.getParent());
+  }
+
+  @Test
+  public void TestGetConfIncludeCommand() throws Exception {
+    HdfsConfiguration conf = new HdfsConfiguration();
+    // Set up the hosts/exclude files.
+    localFileSys = FileSystem.getLocal(conf);
+    Path workingDir = localFileSys.getWorkingDirectory();
+    Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
+    Path hostsFile = new Path(dir, "hosts");
+    Path excludeFile = new Path(dir, "exclude");
+
+    // Setup conf
+    conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
+    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
+    writeConfigFile(hostsFile, null);
+    writeConfigFile(excludeFile, null);
+    String[] args = {"-includeFile"};
+    String ret = runTool(conf, args, true);
+    assertEquals(hostsFile.toUri().getPath(), ret.trim());
+    cleanupFile(localFileSys, excludeFile.getParent());
+  }
+
+  private void writeConfigFile(Path name, ArrayList<String> nodes)
+      throws IOException {
+    // delete if it already exists
+    if (localFileSys.exists(name)) {
+      localFileSys.delete(name, true);
+    }
+
+    FSDataOutputStream stm = localFileSys.create(name);
+
+    if (nodes != null) {
+      for (Iterator<String> it = nodes.iterator(); it.hasNext();) {
+        String node = it.next();
+        stm.writeBytes(node);
+        stm.writeBytes("\n");
+      }
+    }
+    stm.close();
+  }
+
+  private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
+    assertTrue(fileSys.exists(name));
+    fileSys.delete(name, true);
+    assertTrue(!fileSys.exists(name));
+  }
 }
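
Note on the root cause, for context: GetConf registered the -includeFile and -excludeFile handlers with the literal strings "DFSConfigKeys.DFS_HOSTS" and "DFSConfigKeys.DFS_HOSTS_EXCLUDE" as configuration keys. No property by those names exists, so the lookup always came back empty and both commands failed. The constants resolve to the real property names dfs.hosts and dfs.hosts.exclude. The snippet below is an illustrative sketch of that difference (the demo class is hypothetical, not part of the patch or of Hadoop source):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.DFSConfigKeys;
    import org.apache.hadoop.hdfs.HdfsConfiguration;

    // Hypothetical demo class, not part of the patch.
    public class GetConfKeyDemo {
      public static void main(String[] args) {
        Configuration conf = new HdfsConfiguration();
        conf.set(DFSConfigKeys.DFS_HOSTS, "/etc/hadoop/dfs.hosts");

        // Before the fix: the literal class-reference string is not a real
        // property name, so the lookup returns null and getconf fails.
        System.out.println(conf.get("DFSConfigKeys.DFS_HOSTS"));  // null

        // After the fix: the constant is the actual key "dfs.hosts".
        System.out.println(conf.get(DFSConfigKeys.DFS_HOSTS));    // /etc/hadoop/dfs.hosts
      }
    }

With the fix applied, "hdfs getconf -includeFile" and "hdfs getconf -excludeFile" print the configured dfs.hosts and dfs.hosts.exclude paths, which is what the two new tests assert.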