HDFS-5582. hdfs getconf -excludeFile or -includeFile always failed (sathish via cmccabe)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1554295 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
parent
c179d58eee
commit
e596f17734
|
@ -1019,6 +1019,9 @@ Release 2.3.0 - UNRELEASED
|
|||
HDFS-5661. Browsing FileSystem via web ui, should use datanode's fqdn instead of ip
|
||||
address. (Benoy Antony via jing9)
|
||||
|
||||
HDFS-5582. hdfs getconf -excludeFile or -includeFile always failed (sathish
|
||||
via cmccabe)
|
||||
|
||||
Release 2.2.0 - 2013-10-13
|
||||
|
||||
INCOMPATIBLE CHANGES
|
||||
|
|
|
@ -29,6 +29,7 @@ import java.util.Map;
|
|||
import org.apache.hadoop.HadoopIllegalArgumentException;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.conf.Configured;
|
||||
import org.apache.hadoop.hdfs.DFSConfigKeys;
|
||||
import org.apache.hadoop.hdfs.DFSUtil;
|
||||
import org.apache.hadoop.hdfs.HdfsConfiguration;
|
||||
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
|
||||
|
@ -85,9 +86,9 @@ public class GetConf extends Configured implements Tool {
|
|||
map.put(BACKUP.getName().toLowerCase(),
|
||||
new BackupNodesCommandHandler());
|
||||
map.put(INCLUDE_FILE.getName().toLowerCase(),
|
||||
new CommandHandler("DFSConfigKeys.DFS_HOSTS"));
|
||||
new CommandHandler(DFSConfigKeys.DFS_HOSTS));
|
||||
map.put(EXCLUDE_FILE.getName().toLowerCase(),
|
||||
new CommandHandler("DFSConfigKeys.DFS_HOSTS_EXCLUDE"));
|
||||
new CommandHandler(DFSConfigKeys.DFS_HOSTS_EXCLUDE));
|
||||
map.put(NNRPCADDRESSES.getName().toLowerCase(),
|
||||
new NNRpcAddressesCommandHandler());
|
||||
map.put(CONFKEY.getName().toLowerCase(),
|
||||
|
|
|
@ -33,10 +33,15 @@ import java.io.PrintStream;
|
|||
import java.net.InetSocketAddress;
|
||||
import java.util.ArrayList;
|
||||
import java.util.Arrays;
|
||||
import java.util.Iterator;
|
||||
import java.util.List;
|
||||
import java.util.Map;
|
||||
import java.util.StringTokenizer;
|
||||
|
||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.hdfs.DFSConfigKeys;
|
||||
import org.apache.hadoop.hdfs.DFSUtil;
|
||||
import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress;
|
||||
import org.apache.hadoop.hdfs.HdfsConfiguration;
|
||||
|
@ -55,7 +60,7 @@ public class TestGetConf {
|
|||
enum TestType {
|
||||
NAMENODE, BACKUP, SECONDARY, NNRPCADDRESSES
|
||||
}
|
||||
|
||||
FileSystem localFileSys;
|
||||
/** Setup federation nameServiceIds in the configuration */
|
||||
private void setupNameServices(HdfsConfiguration conf, int nameServiceIdCount) {
|
||||
StringBuilder nsList = new StringBuilder();
|
||||
|
@ -379,4 +384,70 @@ public class TestGetConf {
|
|||
}
|
||||
}
|
||||
}
|
||||
@Test
|
||||
public void TestGetConfExcludeCommand() throws Exception{
|
||||
HdfsConfiguration conf = new HdfsConfiguration();
|
||||
// Set up the hosts/exclude files.
|
||||
localFileSys = FileSystem.getLocal(conf);
|
||||
Path workingDir = localFileSys.getWorkingDirectory();
|
||||
Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
|
||||
Path hostsFile = new Path(dir, "hosts");
|
||||
Path excludeFile = new Path(dir, "exclude");
|
||||
|
||||
// Setup conf
|
||||
conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
|
||||
conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
|
||||
writeConfigFile(hostsFile, null);
|
||||
writeConfigFile(excludeFile, null);
|
||||
String[] args = {"-excludeFile"};
|
||||
String ret = runTool(conf, args, true);
|
||||
assertEquals(excludeFile.toUri().getPath(),ret.trim());
|
||||
cleanupFile(localFileSys, excludeFile.getParent());
|
||||
}
|
||||
|
||||
@Test
|
||||
public void TestGetConfIncludeCommand() throws Exception{
|
||||
HdfsConfiguration conf = new HdfsConfiguration();
|
||||
// Set up the hosts/exclude files.
|
||||
localFileSys = FileSystem.getLocal(conf);
|
||||
Path workingDir = localFileSys.getWorkingDirectory();
|
||||
Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
|
||||
Path hostsFile = new Path(dir, "hosts");
|
||||
Path excludeFile = new Path(dir, "exclude");
|
||||
|
||||
// Setup conf
|
||||
conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
|
||||
conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
|
||||
writeConfigFile(hostsFile, null);
|
||||
writeConfigFile(excludeFile, null);
|
||||
String[] args = {"-includeFile"};
|
||||
String ret = runTool(conf, args, true);
|
||||
assertEquals(hostsFile.toUri().getPath(),ret.trim());
|
||||
cleanupFile(localFileSys, excludeFile.getParent());
|
||||
}
|
||||
|
||||
private void writeConfigFile(Path name, ArrayList<String> nodes)
|
||||
throws IOException {
|
||||
// delete if it already exists
|
||||
if (localFileSys.exists(name)) {
|
||||
localFileSys.delete(name, true);
|
||||
}
|
||||
|
||||
FSDataOutputStream stm = localFileSys.create(name);
|
||||
|
||||
if (nodes != null) {
|
||||
for (Iterator<String> it = nodes.iterator(); it.hasNext();) {
|
||||
String node = it.next();
|
||||
stm.writeBytes(node);
|
||||
stm.writeBytes("\n");
|
||||
}
|
||||
}
|
||||
stm.close();
|
||||
}
|
||||
|
||||
private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
|
||||
assertTrue(fileSys.exists(name));
|
||||
fileSys.delete(name, true);
|
||||
assertTrue(!fileSys.exists(name));
|
||||
}
|
||||
}
|
||||
|
|
Loading…
Reference in New Issue