diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c31de2e8dc9..53a74e028ef 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -1695,6 +1695,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-9701. DN may deadlock when hot-swapping under load. (Xiao Chen via
     lei)
 
+    HDFS-9718. HAUtil#getConfForOtherNodes should unset independent generic keys
+    before initialize (DENG FEI via vinayakumarb)
+
 Release 2.7.3 - UNRELEASED
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java
index 7800596f0a7..91853953f38 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/HAUtil.java
@@ -18,7 +18,16 @@
 package org.apache.hadoop.hdfs;
 
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HA_NAMENODE_ID_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTPS_BIND_HOST_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_HTTP_BIND_HOST_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LIFELINE_RPC_BIND_HOST_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_RPC_BIND_HOST_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY;
 
 import java.io.IOException;
@@ -64,6 +73,19 @@ public class HAUtil {
   private static final DelegationTokenSelector tokenSelector =
       new DelegationTokenSelector();
 
+  private static final String[] HA_SPECIAL_INDEPENDENT_KEYS = new String []{
+    DFS_NAMENODE_RPC_ADDRESS_KEY,
+    DFS_NAMENODE_RPC_BIND_HOST_KEY,
+    DFS_NAMENODE_LIFELINE_RPC_ADDRESS_KEY,
+    DFS_NAMENODE_LIFELINE_RPC_BIND_HOST_KEY,
+    DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY,
+    DFS_NAMENODE_SERVICE_RPC_BIND_HOST_KEY,
+    DFS_NAMENODE_HTTP_ADDRESS_KEY,
+    DFS_NAMENODE_HTTPS_ADDRESS_KEY,
+    DFS_NAMENODE_HTTP_BIND_HOST_KEY,
+    DFS_NAMENODE_HTTPS_BIND_HOST_KEY
+  };
+
   private HAUtil() { /* Hidden constructor */ }
 
   /**
@@ -188,6 +210,10 @@ public static Configuration getConfForOtherNode(
 
     // Look up the address of the active NN.
     Configuration confForOtherNode = new Configuration(myConf);
+    // unset independent properties
+    for (String idpKey : HA_SPECIAL_INDEPENDENT_KEYS) {
+      confForOtherNode.unset(idpKey);
+    }
     NameNode.initializeGenericKeys(confForOtherNode, nsId, otherNn);
     return confForOtherNode;
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestHAConfiguration.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestHAConfiguration.java
index c4a29883eed..90ccd86a184 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestHAConfiguration.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/ha/TestHAConfiguration.java
@@ -20,6 +20,8 @@
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_EDITS_DIR_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_SHARED_EDITS_DIR_KEY;
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
@@ -30,6 +32,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode;
@@ -127,4 +130,23 @@ public void testSecondaryNameNodeDoesNotStart() throws IOException {
           "Cannot use SecondaryNameNode in an HA cluster", ioe);
     }
   }
+
+  @Test
+  public void testGetOtherNNGenericConf() throws IOException {
+    String nsId = "ns1";
+    String host1 = "1.2.3.1";
+    String host2 = "1.2.3.2";
+    Configuration conf = getHAConf(nsId, host1, host2);
+    conf.set(DFSUtil.addKeySuffixes(
+        DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY, nsId, "nn1"), host1
+        + ":54321");
+    conf.set(DFSConfigKeys.DFS_NAMESERVICE_ID, "ns1");
+    NameNode.initializeGenericKeys(conf, "ns1", "nn1");
+
+    Configuration nn2Conf = HAUtil.getConfForOtherNode(conf);
+    assertEquals(nn2Conf.get(DFSConfigKeys.DFS_HA_NAMENODE_ID_KEY), "nn2");
+    assertTrue(!conf.get(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY).equals(
+        nn2Conf.get(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY)));
+    assertNull(nn2Conf.get(DFSConfigKeys.DFS_NAMENODE_SERVICE_RPC_ADDRESS_KEY));
+  }
 }