HDFS-2086. If the include hosts list contains host names, after restarting namenode, data nodes registration is denied. Contributed by Tanping Wang.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1139090 13f79535-47bb-0310-9956-ffa450edef68
commit 42863b9baf
parent 15e68cb374
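Why the old check failed: FSNamesystem compared what a registering datanode reported against the dfs.hosts entries as literal strings, so an include file that lists host names stops matching once a restarted namenode sees the node only in IP form (or the other way around). The committed fix, shown in the FSNamesystem.java hunks below, resolves both sides before comparing. The sketch that follows is only an illustration of that idea with hypothetical names, not code from the patch:

import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.Set;

// Hypothetical, simplified sketch (not the FSNamesystem API) of why a literal
// string compare fails when dfs.hosts lists "host1.example.com" but the
// datanode presents "10.0.0.1", and how resolving before comparing fixes it.
public class IncludeListSketch {

  // Roughly the old behaviour: membership is a plain string lookup, so a
  // hostname entry never matches an IP-form registrant.
  static boolean literalMatch(Set<String> includes, String registrant) {
    return includes.isEmpty() || includes.contains(registrant);
  }

  // Roughly the new behaviour: resolve the registrant once and accept a match
  // on either its canonical host name or its IP address.
  static boolean resolvedMatch(Set<String> includes, String registrant) {
    if (includes.isEmpty()) {
      return true;                          // empty include list admits everyone
    }
    try {
      InetAddress addr = InetAddress.getByName(registrant);
      return includes.contains(addr.getHostName())
          || includes.contains(addr.getHostAddress());
    } catch (UnknownHostException e) {
      return includes.contains(registrant); // unresolvable: fall back to the literal form
    }
  }
}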
CHANGES.txt
@@ -766,6 +766,9 @@ Trunk (unreleased changes)
     HDFS-1734. 'Chunk size to view' option is not working in Name Node UI.
     (Uma Maheswara Rao G via jitendra)
 
+    HDFS-2086. If the include hosts list contains host names, after restarting
+    namenode, data nodes registration is denied. Contributed by Tanping Wang.
+
 Release 0.22.0 - Unreleased
 
   INCOMPATIBLE CHANGES
FSNamesystem.java
@@ -31,6 +31,7 @@ import java.io.PrintWriter;
 import java.lang.management.ManagementFactory;
 import java.net.InetAddress;
 import java.net.URI;
+import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
@@ -3784,9 +3785,19 @@ public class FSNamesystem implements FSConstants, FSNamesystemMBean,
         nodes.add(dn);
       }
       //Remove any form of the this datanode in include/exclude lists.
-      mustList.remove(dn.getName());
-      mustList.remove(dn.getHost());
-      mustList.remove(dn.getHostName());
+      try {
+        InetAddress inet = InetAddress.getByName(dn.getHost());
+        // compare hostname(:port)
+        mustList.remove(inet.getHostName());
+        mustList.remove(inet.getHostName()+":"+dn.getPort());
+        // compare ipaddress(:port)
+        mustList.remove(inet.getHostAddress().toString());
+        mustList.remove(inet.getHostAddress().toString()+ ":" +dn.getPort());
+      } catch ( UnknownHostException e ) {
+        mustList.remove(dn.getName());
+        mustList.remove(dn.getHost());
+        LOG.warn(e);
+      }
     }
   }
 
@@ -4031,23 +4042,62 @@ public class FSNamesystem implements FSConstants, FSNamesystemMBean,
    */
   private boolean inHostsList(DatanodeID node, String ipAddr) {
     Set<String> hostsList = hostsReader.getHosts();
-    return (hostsList.isEmpty() ||
-            (ipAddr != null && hostsList.contains(ipAddr)) ||
-            hostsList.contains(node.getHost()) ||
-            hostsList.contains(node.getName()) ||
-            ((node instanceof DatanodeInfo) &&
-             hostsList.contains(((DatanodeInfo)node).getHostName())));
+    return checkInList(node, ipAddr, hostsList, false);
   }
 
   private boolean inExcludedHostsList(DatanodeID node, String ipAddr) {
     Set<String> excludeList = hostsReader.getExcludedHosts();
-    return ((ipAddr != null && excludeList.contains(ipAddr)) ||
-            excludeList.contains(node.getHost()) ||
-            excludeList.contains(node.getName()) ||
-            ((node instanceof DatanodeInfo) &&
-             excludeList.contains(((DatanodeInfo)node).getHostName())));
+    return checkInList(node, ipAddr, excludeList, true);
   }
 
+
+  /**
+   * Check if the given node (of DatanodeID or ipAddress) is in the (include or
+   * exclude) list. If ipAddress in null, check only based upon the given
+   * DatanodeID. If ipAddress is not null, the ipAddress should refers to the
+   * same host that given DatanodeID refers to.
+   *
+   * @param node, DatanodeID, the host DatanodeID
+   * @param ipAddress, if not null, should refers to the same host
+   *                   that DatanodeID refers to
+   * @param hostsList, the list of hosts in the include/exclude file
+   * @param isExcludeList, boolean, true if this is the exclude list
+   * @return boolean, if in the list
+   */
+  private boolean checkInList(DatanodeID node, String ipAddress,
+      Set<String> hostsList, boolean isExcludeList) {
+    InetAddress iaddr = null;
+    try {
+      if (ipAddress != null) {
+        iaddr = InetAddress.getByName(ipAddress);
+      } else {
+        iaddr = InetAddress.getByName(node.getHost());
+      }
+    } catch (UnknownHostException e) {
+      LOG.warn("Unknown host in host list: "+ipAddress);
+      // can't resolve the host name.
+      if (isExcludeList){
+        return true;
+      } else {
+        return false;
+      }
+    }
+
+    // if include list is empty, host is in include list
+    if ( (!isExcludeList) && (hostsList.isEmpty()) ){
+      return true;
+    }
+    return // compare ipaddress(:port)
+    (hostsList.contains(iaddr.getHostAddress().toString()))
+        || (hostsList.contains(iaddr.getHostAddress().toString() + ":"
+            + node.getPort()))
+        // compare hostname(:port)
+        || (hostsList.contains(iaddr.getHostName()))
+        || (hostsList.contains(iaddr.getHostName() + ":" + node.getPort()))
+        || ((node instanceof DatanodeInfo) && hostsList
+            .contains(((DatanodeInfo) node).getHostName()));
+  }
+
   /**
    * Rereads the config to get hosts and exclude list file names.
    * Rereads the files to update the hosts and exclude lists. It
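A note on the error handling in checkInList above: when a host cannot be resolved, the method returns true for the exclude list and false for the include list, so DNS problems err on the side of keeping a node out rather than letting it in, while an empty include list keeps its historical meaning of admitting every node.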
TestStartup.java
@@ -25,7 +25,9 @@ import java.io.FileInputStream;
 import java.io.FileOutputStream;
 import java.io.IOException;
 import java.io.RandomAccessFile;
+import java.net.InetAddress;
 import java.net.URI;
+import java.util.ArrayList;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Properties;
@@ -43,18 +45,21 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.fs.permission.PermissionStatus;
 
+import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.FSConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.server.common.Storage.StorageDirectory;
 import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType;
 import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeFile;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.MD5Hash;
-import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.junit.Assert;
 
 /**
  * Startup and checkpoint tests
@@ -491,4 +496,89 @@ public class TestStartup extends TestCase {
       namenode.join();
     }
   }
+
+  /**
+   * This test tests hosts include list contains host names. After namenode
+   * restarts, the still alive datanodes should not have any trouble in getting
+   * registrant again.
+   */
+  public void testNNRestart() throws IOException, InterruptedException {
+    MiniDFSCluster cluster = null;
+    FileSystem localFileSys;
+    Path hostsFile;
+    Path excludeFile;
+    Configuration conf = new HdfsConfiguration();
+    int HEARTBEAT_INTERVAL = 1; // heartbeat interval in seconds
+    // Set up the hosts/exclude files.
+    localFileSys = FileSystem.getLocal(conf);
+    Path workingDir = localFileSys.getWorkingDirectory();
+    Path dir = new Path(workingDir, "build/test/data/work-dir/restartnn");
+    hostsFile = new Path(dir, "hosts");
+    excludeFile = new Path(dir, "exclude");
+
+    // Setup conf
+    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
+    writeConfigFile(localFileSys, excludeFile, null);
+    conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
+    // write into hosts file
+    ArrayList<String>list = new ArrayList<String>();
+    byte b[] = {127, 0, 0, 1};
+    InetAddress inetAddress = InetAddress.getByAddress(b);
+    list.add(inetAddress.getHostName());
+    writeConfigFile(localFileSys, hostsFile, list);
+    int numNameNodes = 1;
+    int numDatanodes = 1;
+
+    try {
+      cluster = new MiniDFSCluster.Builder(conf).numNameNodes(numNameNodes)
+          .numDataNodes(numDatanodes).setupHostsFile(true).build();
+      cluster.waitActive();
+
+      cluster.restartNameNode();
+      NameNode nn = cluster.getNameNode();
+      assertNotNull(nn);
+      Assert.assertTrue(cluster.isDataNodeUp());
+
+      DatanodeInfo[] info = nn.getDatanodeReport(DatanodeReportType.LIVE);
+      for (int i = 0 ; i < 5 && info.length != numDatanodes; i++) {
+        Thread.sleep(HEARTBEAT_INTERVAL * 1000);
+        info = nn.getDatanodeReport(DatanodeReportType.LIVE);
+      }
+      assertEquals("Number of live nodes should be "+numDatanodes, numDatanodes,
+          info.length);
+
+    } catch (IOException e) {
+      fail(StringUtils.stringifyException(e));
+      throw e;
+    } finally {
+      cleanupFile(localFileSys, excludeFile.getParent());
+      if (cluster != null) {
+        cluster.shutdown();
+      }
+    }
+  }
+
+  private void writeConfigFile(FileSystem localFileSys, Path name,
+      ArrayList<String> nodes) throws IOException {
+    // delete if it already exists
+    if (localFileSys.exists(name)) {
+      localFileSys.delete(name, true);
+    }
+
+    if (nodes != null) {
+      FSDataOutputStream stm = localFileSys.create(name);
+      for (Iterator<String> it = nodes.iterator(); it.hasNext();) {
+        String node = it.next();
+        stm.writeBytes(node);
+        stm.writeBytes("\n");
+      }
+      stm.close();
+    }
+  }
+
+  private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
+    assertTrue(fileSys.exists(name));
+    fileSys.delete(name, true);
+    assertTrue(!fileSys.exists(name));
+  }
 }
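One detail of the new test worth noting: it needs a resolvable host name, rather than an IP address, in the dfs.hosts include file, and it gets one by reverse-resolving the loopback address. A minimal standalone illustration (hypothetical class name; the result is typically "localhost"):

import java.net.InetAddress;
import java.net.UnknownHostException;

public class LoopbackNameLookup {
  public static void main(String[] args) throws UnknownHostException {
    // Reverse-resolve 127.0.0.1, as testNNRestart does, to obtain a host
    // name that can be written into the include (dfs.hosts) file.
    byte[] loopback = {127, 0, 0, 1};
    System.out.println(InetAddress.getByAddress(loopback).getHostName());
  }
}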