HDFS-3305. GetImageServlet should consider SBN a valid requestor in a secure HA setup. Contributed by Aaron T. Myers.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1328115 13f79535-47bb-0310-9956-ffa450edef68
Aaron Myers 2012-04-19 20:43:03 +00:00
parent 8d09ab0da2
commit 49ed783a3a
4 changed files with 92 additions and 7 deletions

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -546,6 +546,9 @@ Release 2.0.0 - UNRELEASED
     HDFS-891. DataNode no longer needs to check for dfs.network.script.
     (harsh via eli)
 
+    HDFS-3305. GetImageServlet should consider SBN a valid requestor in a
+    secure HA setup. (atm)
+
   BREAKDOWN OF HDFS-1623 SUBTASKS
 
     HDFS-2179. Add fencing framework and mechanisms for NameNode HA. (todd)

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/GetImageServlet.java

@@ -35,6 +35,8 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
@@ -44,6 +46,7 @@ import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.StringUtils;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 
 /**
@@ -218,26 +221,44 @@ public class GetImageServlet extends HttpServlet {
     return throttler;
   }
 
-  protected boolean isValidRequestor(String remoteUser, Configuration conf)
+  @VisibleForTesting
+  static boolean isValidRequestor(String remoteUser, Configuration conf)
       throws IOException {
     if(remoteUser == null) { // This really shouldn't happen...
       LOG.warn("Received null remoteUser while authorizing access to getImage servlet");
       return false;
     }
 
-    String[] validRequestors = {
-        SecurityUtil.getServerPrincipal(conf
-            .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), NameNode
-            .getAddress(conf).getHostName()),
-        SecurityUtil.getServerPrincipal(conf
-            .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), NameNode
-            .getAddress(conf).getHostName()),
-        SecurityUtil.getServerPrincipal(conf
-            .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
-            SecondaryNameNode.getHttpAddress(conf).getHostName()),
-        SecurityUtil.getServerPrincipal(conf
-            .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY),
-            SecondaryNameNode.getHttpAddress(conf).getHostName()) };
+    Set<String> validRequestors = new HashSet<String>();
+
+    validRequestors.add(
+        SecurityUtil.getServerPrincipal(conf
+            .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), NameNode
+            .getAddress(conf).getHostName()));
+    validRequestors.add(
+        SecurityUtil.getServerPrincipal(conf
+            .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), NameNode
+            .getAddress(conf).getHostName()));
+    validRequestors.add(
+        SecurityUtil.getServerPrincipal(conf
+            .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
+            SecondaryNameNode.getHttpAddress(conf).getHostName()));
+    validRequestors.add(
+        SecurityUtil.getServerPrincipal(conf
+            .get(DFSConfigKeys.DFS_SECONDARY_NAMENODE_USER_NAME_KEY),
+            SecondaryNameNode.getHttpAddress(conf).getHostName()));
+
+    if (HAUtil.isHAEnabled(conf, DFSUtil.getNamenodeNameServiceId(conf))) {
+      Configuration otherNnConf = HAUtil.getConfForOtherNode(conf);
+      validRequestors.add(
+          SecurityUtil.getServerPrincipal(otherNnConf
+              .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY),
+              NameNode.getAddress(otherNnConf).getHostName()));
+      validRequestors.add(
+          SecurityUtil.getServerPrincipal(otherNnConf
+              .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY),
+              NameNode.getAddress(otherNnConf).getHostName()));
+    }
 
     for(String v : validRequestors) {
       if(v != null && v.equals(remoteUser)) {
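The heavy lifting above is done by SecurityUtil.getServerPrincipal(), which substitutes the _HOST token in a configured Kerberos principal with the hostname it is given; by handing it the other NameNode's address from HAUtil.getConfForOtherNode(), the servlet derives the standby's concrete principal. A minimal standalone sketch of that substitution (the class name and literal values are illustrative only, not part of this patch):

import java.io.IOException;

import org.apache.hadoop.security.SecurityUtil;

public class PrincipalResolutionSketch {
  public static void main(String[] args) throws IOException {
    // The principal pattern configured under the NameNode principal key in
    // the test below; _HOST is a placeholder token.
    String pattern = "hdfs/_HOST@TEST-REALM.COM";

    // Substituting the standby NameNode's hostname for _HOST yields the
    // principal that isValidRequestor() must now accept.
    String standbyPrincipal = SecurityUtil.getServerPrincipal(pattern, "host2");
    System.out.println(standbyPrincipal);  // prints hdfs/host2@TEST-REALM.COM
  }
}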

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNode.java

@@ -171,7 +171,8 @@ public class NameNode {
     DFS_NAMENODE_BACKUP_ADDRESS_KEY,
     DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
     DFS_NAMENODE_BACKUP_SERVICE_RPC_ADDRESS_KEY,
-    DFS_HA_FENCE_METHODS_KEY
+    DFS_HA_FENCE_METHODS_KEY,
+    DFS_NAMENODE_USER_NAME_KEY
   };
 
   public long getProtocolVersion(String protocol,
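Adding DFS_NAMENODE_USER_NAME_KEY to this per-NameNode key list is what lets the servlet find the principal at all: NameNode.initializeGenericKeys() copies each listed key from its suffixed <key>.<nameservice>.<namenode-id> form into the plain form that non-HA-aware code reads. A small sketch of that copy, assuming only the behaviour exercised by the new test below (the class name is made up for illustration):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.namenode.NameNode;

public class GenericKeyCopySketch {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration();

    // The principal is configured only under its per-nameservice/per-NN key,
    // i.e. dfs.namenode.kerberos.principal.ns1.nn1.
    conf.set(DFSUtil.addKeySuffixes(
        DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, "ns1", "nn1"),
        "hdfs/_HOST@TEST-REALM.COM");

    // Because the key is now in the NameNode-specific key list, initializing
    // the generic keys also exposes the value under the plain key.
    NameNode.initializeGenericKeys(conf, "ns1", "nn1");
    System.out.println(conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY));
  }
}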

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestGetImageServlet.java

@@ -0,0 +1,60 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hdfs.server.namenode;

import static org.junit.Assert.*;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.junit.Test;

public class TestGetImageServlet {

  @Test
  public void testIsValidRequestorWithHa() throws IOException {
    Configuration conf = new HdfsConfiguration();

    // Set up generic HA configs.
    conf.set(DFSConfigKeys.DFS_FEDERATION_NAMESERVICES, "ns1");
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX,
        "ns1"), "nn1,nn2");

    // Set up NN1 HA configs.
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
        "ns1", "nn1"), "host1:1234");
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
        "ns1", "nn1"), "hdfs/_HOST@TEST-REALM.COM");

    // Set up NN2 HA configs.
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_RPC_ADDRESS_KEY,
        "ns1", "nn2"), "host2:1234");
    conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY,
        "ns1", "nn2"), "hdfs/_HOST@TEST-REALM.COM");

    // Initialize this conf object as though we're running on NN1.
    NameNode.initializeGenericKeys(conf, "ns1", "nn1");

    // Make sure that NN2 is considered a valid fsimage/edits requestor.
    assertTrue(GetImageServlet.isValidRequestor("hdfs/host2@TEST-REALM.COM",
        conf));
  }
}
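A natural companion check, not part of this patch, would cover the negative case as well. This assumes isValidRequestor() falls through to reject principals that match none of the configured NameNode or SecondaryNameNode principals (that branch sits outside the hunk shown above); the extra assertion would go at the end of testIsValidRequestorWithHa():

    // Hypothetical addition: a principal matching neither NN should be rejected.
    assertFalse(GetImageServlet.isValidRequestor("hdfs/host3@TEST-REALM.COM",
        conf));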