diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8df39c38be1..18e495b2dfd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -422,6 +422,9 @@ Release 2.0.0 - UNRELEASED
     HDFS-891. DataNode no longer needs to check for dfs.network.script.
     (harsh via eli)
 
+    HDFS-3305. GetImageServlet should consider SBN a valid requestor in a
+    secure HA setup. (atm)
+
   BREAKDOWN OF HDFS-1623 SUBTASKS
 
     HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
index 76da1540122..14c0a49e1e6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java
@@ -35,6 +35,8 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
@@ -44,6 +46,7 @@ import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 
 /**
@@ -218,26 +221,44 @@ public class GetImageServlet extends HttpServlet {
     return throttler;
   }
 
-  protected boolean isValidRequestor(String remoteUser, Configuration conf)
+  @VisibleForTesting
+  static boolean isValidRequestor(String remoteUser, Configuration conf)
       throws IOException {
     if(remoteUser == null) { // This really shouldn't happen...
LOG.warn("Received null remoteUser while authorizing access to getImage servlet"); return false; } + + Set validRequestors = new HashSet(); - String[] validRequestors = { + validRequestors.add( SecurityUtil.getServerPrincipal(conf .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), NameNode - .getAddress(conf).getHostName()), + .getAddress(conf).getHostName())); + validRequestors.add( SecurityUtil.getServerPrincipal(conf .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), NameNode - .getAddress(conf).getHostName()), + .getAddress(conf).getHostName())); + validRequestors.add( SecurityUtil.getServerPrincipal(conf .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY), - SecondaryNameNode.getHttpAddress(conf).getHostName()), + SecondaryNameNode.getHttpAddress(conf).getHostName())); + validRequestors.add( SecurityUtil.getServerPrincipal(conf .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY), - SecondaryNameNode.getHttpAddress(conf).getHostName()) }; + SecondaryNameNode.getHttpAddress(conf).getHostName())); + + if (HAUtil.isHAEnabled(conf, DFSUtil.getNamenodeNameServiceId(conf))) { + Configuration otherNnConf = HAUtil.getConfForOtherNode(conf); + validRequestors.add( + SecurityUtil.getServerPrincipal(otherNnConf + .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), + NameNode.getAddress(otherNnConf).getHostName())); + validRequestors.add( + SecurityUtil.getServerPrincipal(otherNnConf + .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), + NameNode.getAddress(otherNnConf).getHostName())); + } for(String v : validRequestors) { if(v != null && v.equals(remoteUser)) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java index 370494c6919..d7a2fb42499 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java @@ -171,7 +171,8 @@ public class NameNode { DFS_NAMENODE_BACKUP_ADDRESS_KEY, DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY, DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY, - DFS_HA_FENCE_METHODS_KEY + DFS_HA_FENCE_METHODS_KEY, + DFS_NAMENODE_USER_NAME_KEY }; public long getProtocolVersion(String protocol, diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java new file mode 100644 index 00000000000..c574cb3846a --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java @@ -0,0 +1,60 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.junit.Test;
+
+public class TestGetImageServlet {
+
+  @Test
+  public void testIsValidRequestorWithHa() throws IOException {
+    Configuration conf = new HdfsConfiguration();
+
+    // Set up generic HA configs.
+    conf.set(DFSConfigKeys.DFS_FEDERATION_NAMESERVICES, "ns1");
+    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX,
+        "ns1"), "nn1,nn2");
+
+    // Set up NN1 HA configs.
+    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
+        "ns1", "nn1"), "host1:1234");
+    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
+        "ns1", "nn1"), "hdfs/_HOST@TEST-REALM.COM");
+
+    // Set up NN2 HA configs.
+    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
+        "ns1", "nn2"), "host2:1234");
+    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
+        "ns1", "nn2"), "hdfs/_HOST@TEST-REALM.COM");
+
+    // Initialize this conf object as though we're running on NN1.
+    NameNode.initializeGenericKeys(conf, "ns1", "nn1");
+
+    // Make sure that NN2 is considered a valid fsimage/edits requestor.
+    assertTrue(GetImageServlet.isValidRequestor("hdfs/host2@TEST-REALM.COM",
+        conf));
+  }
+}
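A natural companion to the new test is the negative case: with the same two-NameNode conf, a principal from a host that belongs to neither nn1 nor nn2 should fall through the validRequestors loop and be rejected. The sketch below is illustrative only and not part of the patch; it assumes the same imports as TestGetImageServlet.java, and the method name testIsInvalidRequestorWithHa and the hdfs/host3@TEST-REALM.COM principal are made up.

  // Hypothetical companion test (not in the patch): same HA conf as
  // testIsValidRequestorWithHa, but the requestor matches neither NN.
  @Test
  public void testIsInvalidRequestorWithHa() throws IOException {
    Configuration conf = new HdfsConfiguration();
    conf.set(DFSConfigKeys.DFS_FEDERATION_NAMESERVICES, "ns1");
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX,
        "ns1"), "nn1,nn2");
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
        "ns1", "nn1"), "host1:1234");
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
        "ns1", "nn1"), "hdfs/_HOST@TEST-REALM.COM");
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
        "ns1", "nn2"), "host2:1234");
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
        "ns1", "nn2"), "hdfs/_HOST@TEST-REALM.COM");
    NameNode.initializeGenericKeys(conf, "ns1", "nn1");

    // A principal from an unrelated host must not be authorized to fetch
    // the fsimage/edits.
    assertFalse(GetImageServlet.isValidRequestor("hdfs/host3@TEST-REALM.COM",
        conf));
  }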