HADOOP-9968 and HADOOP-10448. Merging r1570934 and 1598396 from trunk to branch-2.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1598440 13f79535-47bb-0310-9956-ffa450edef68
Arpit Agarwal 2014-05-29 23:38:07 +00:00
parent c19b55ce32
commit d1b7433a62
16 changed files with 509 additions and 231 deletions

View File

@@ -60,6 +60,12 @@ Release 2.5.0 - UNRELEASED
    HADOOP-10566. Refactor proxyservers out of ProxyUsers. (Benoy Antony via
    Arpit Agarwal)

    HADOOP-9968. ProxyUsers does not work with NetGroups. (Benoy Antony via
    Devaraj Das)

    HADOOP-10448. Support pluggable mechanism to specify proxy user settings.
    (Benoy Antony via Arpit Agarwal)

  OPTIMIZATIONS

  BUG FIXES

View File

@@ -290,5 +290,8 @@ public class CommonConfigurationKeysPublic {
  /** Class to override Sasl Properties for a connection */
  public static final String HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS =
    "hadoop.security.saslproperties.resolver.class";
  /** Class to override Impersonation provider */
  public static final String HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS =
    "hadoop.security.impersonation.provider.class";
}

View File

@@ -0,0 +1,210 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security.authorize;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Map.Entry;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.Groups;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import com.google.common.annotations.VisibleForTesting;
public class DefaultImpersonationProvider implements ImpersonationProvider {
private static final String CONF_HOSTS = ".hosts";
private static final String CONF_USERS = ".users";
private static final String CONF_GROUPS = ".groups";
private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
// list of users, groups and hosts per proxyuser
private Map<String, Collection<String>> proxyUsers =
new HashMap<String, Collection<String>>();
private Map<String, Collection<String>> proxyGroups =
new HashMap<String, Collection<String>>();
private Map<String, Collection<String>> proxyHosts =
new HashMap<String, Collection<String>>();
private Configuration conf;
@Override
public void setConf(Configuration conf) {
this.conf = conf;
// get all the new keys for users
String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS;
Map<String,String> allMatchKeys = conf.getValByRegex(regex);
for(Entry<String, String> entry : allMatchKeys.entrySet()) {
Collection<String> users = StringUtils.getTrimmedStringCollection(entry.getValue());
proxyUsers.put(entry.getKey(), users);
}
// get all the new keys for groups
regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS;
allMatchKeys = conf.getValByRegex(regex);
for(Entry<String, String> entry : allMatchKeys.entrySet()) {
Collection<String> groups = StringUtils.getTrimmedStringCollection(entry.getValue());
proxyGroups.put(entry.getKey(), groups);
//cache the groups. This is needed for NetGroups
Groups.getUserToGroupsMappingService(conf).cacheGroupsAdd(
new ArrayList<String>(groups));
}
// now hosts
regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_HOSTS;
allMatchKeys = conf.getValByRegex(regex);
for(Entry<String, String> entry : allMatchKeys.entrySet()) {
proxyHosts.put(entry.getKey(),
StringUtils.getTrimmedStringCollection(entry.getValue()));
}
}
@Override
public Configuration getConf() {
return conf;
}
@Override
public void authorize(UserGroupInformation user,
String remoteAddress) throws AuthorizationException {
if (user.getRealUser() == null) {
return;
}
boolean userAuthorized = false;
boolean ipAuthorized = false;
UserGroupInformation superUser = user.getRealUser();
Collection<String> allowedUsers = proxyUsers.get(
getProxySuperuserUserConfKey(superUser.getShortUserName()));
if (isWildcardList(allowedUsers)) {
userAuthorized = true;
} else if (allowedUsers != null && !allowedUsers.isEmpty()) {
if (allowedUsers.contains(user.getShortUserName())) {
userAuthorized = true;
}
}
if (!userAuthorized){
Collection<String> allowedUserGroups = proxyGroups.get(
getProxySuperuserGroupConfKey(superUser.getShortUserName()));
if (isWildcardList(allowedUserGroups)) {
userAuthorized = true;
} else if (allowedUserGroups != null && !allowedUserGroups.isEmpty()) {
for (String group : user.getGroupNames()) {
if (allowedUserGroups.contains(group)) {
userAuthorized = true;
break;
}
}
}
if (!userAuthorized) {
throw new AuthorizationException("User: " + superUser.getUserName()
+ " is not allowed to impersonate " + user.getUserName());
}
}
Collection<String> ipList = proxyHosts.get(
getProxySuperuserIpConfKey(superUser.getShortUserName()));
if (isWildcardList(ipList)) {
ipAuthorized = true;
} else if (ipList != null && !ipList.isEmpty()) {
for (String allowedHost : ipList) {
InetAddress hostAddr;
try {
hostAddr = InetAddress.getByName(allowedHost);
} catch (UnknownHostException e) {
continue;
}
if (hostAddr.getHostAddress().equals(remoteAddress)) {
// Authorization is successful
ipAuthorized = true;
}
}
}
if(!ipAuthorized) {
throw new AuthorizationException("Unauthorized connection for super-user: "
+ superUser.getUserName() + " from IP " + remoteAddress);
}
}
/**
* Return true if the configuration specifies the special configuration value
* "*", indicating that any group or host list is allowed to use this configuration.
*/
private boolean isWildcardList(Collection<String> list) {
return (list != null) &&
(list.size() == 1) &&
(list.contains("*"));
}
/**
* Returns configuration key for effective usergroups allowed for a superuser
*
* @param userName name of the superuser
* @return configuration key for superuser usergroups
*/
public static String getProxySuperuserUserConfKey(String userName) {
return CONF_HADOOP_PROXYUSER+userName+CONF_USERS;
}
/**
* Returns configuration key for effective groups allowed for a superuser
*
* @param userName name of the superuser
* @return configuration key for superuser groups
*/
public static String getProxySuperuserGroupConfKey(String userName) {
return CONF_HADOOP_PROXYUSER+userName+CONF_GROUPS;
}
/**
* Return configuration key for superuser ip addresses
*
* @param userName name of the superuser
* @return configuration key for superuser ip-addresses
*/
public static String getProxySuperuserIpConfKey(String userName) {
return CONF_HADOOP_PROXYUSER+userName+CONF_HOSTS;
}
@VisibleForTesting
public Map<String, Collection<String>> getProxyUsers() {
return proxyUsers;
}
@VisibleForTesting
public Map<String, Collection<String>> getProxyGroups() {
return proxyGroups;
}
@VisibleForTesting
public Map<String, Collection<String>> getProxyHosts() {
return proxyHosts;
}
}
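For illustration only (not part of this commit): a minimal sketch of how the provider's per-superuser keys and @VisibleForTesting getters fit together. The superuser "oozie", the proxied users and the host address are invented for the example.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;

public class DefaultProviderSketch {
  public static void main(String[] args) throws AuthorizationException {
    // Hypothetical superuser "oozie": may impersonate "alice" or "bob" from one host.
    Configuration conf = new Configuration();
    conf.set(DefaultImpersonationProvider.getProxySuperuserUserConfKey("oozie"),
        "alice,bob");                                // hadoop.proxyuser.oozie.users
    conf.set(DefaultImpersonationProvider.getProxySuperuserIpConfKey("oozie"),
        "10.0.0.1");                                 // hadoop.proxyuser.oozie.hosts

    DefaultImpersonationProvider provider = new DefaultImpersonationProvider();
    provider.setConf(conf);  // scans the hadoop.proxyuser.* keys into the three maps

    // The @VisibleForTesting getters expose the parsed rules, keyed by config key.
    System.out.println(provider.getProxyUsers()
        .get(DefaultImpersonationProvider.getProxySuperuserUserConfKey("oozie")));
    // prints: [alice, bob]

    UserGroupInformation realUser = UserGroupInformation.createRemoteUser("oozie");
    UserGroupInformation proxyUser = UserGroupInformation.createProxyUserForTesting(
        "alice", realUser, new String[] {});
    provider.authorize(proxyUser, "10.0.0.1");  // throws AuthorizationException if denied
  }
}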

View File

@@ -0,0 +1,34 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.security.authorize;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.security.UserGroupInformation;
public interface ImpersonationProvider extends Configurable {
/**
* Authorize the superuser which is doing doAs
*
* @param user ugi of the effective or proxy user which contains a real user
* @param remoteAddress the ip address of client
* @throws AuthorizationException
*/
public void authorize(UserGroupInformation user, String remoteAddress)
throws AuthorizationException;
}
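For illustration only (not part of this commit): a hedged sketch of a custom ImpersonationProvider. The package, class name and the "ops" group rule are assumptions; the TestDummyImpersonationProvider added to TestProxyUsers further down in this commit follows the same shape.

package com.example;  // hypothetical package, not part of this patch

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ImpersonationProvider;

/** Allows impersonation only when the real user belongs to an assumed "ops" group. */
public class GroupGatedImpersonationProvider implements ImpersonationProvider {
  private Configuration conf;

  @Override
  public void authorize(UserGroupInformation user, String remoteAddress)
      throws AuthorizationException {
    UserGroupInformation realUser = user.getRealUser();
    if (realUser == null) {
      return;  // not a proxy call, nothing to check
    }
    if (!Arrays.asList(realUser.getGroupNames()).contains("ops")) {
      throw new AuthorizationException("User: " + realUser.getUserName()
          + " is not allowed to impersonate " + user.getUserName());
    }
  }

  @Override
  public void setConf(Configuration conf) {
    this.conf = conf;
  }

  @Override
  public Configuration getConf() {
    return conf;
  }
}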

View File

@@ -18,42 +18,35 @@
 package org.apache.hadoop.security.authorize;
-import java.net.InetAddress;
-import java.net.InetSocketAddress;
-import java.net.UnknownHostException;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Map;
-import java.util.Map.Entry;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.util.ReflectionUtils;
 import com.google.common.annotations.VisibleForTesting;
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "HBase", "Hive"})
 public class ProxyUsers {
-  private static final String CONF_HOSTS = ".hosts";
-  private static final String CONF_USERS = ".users";
-  private static final String CONF_GROUPS = ".groups";
-  private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser.";
-  private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\.";
-  private static boolean init = false;
-  //list of users, groups and hosts per proxyuser
-  private static Map<String, Collection<String>> proxyUsers =
-    new HashMap<String, Collection<String>>();
-  private static Map<String, Collection<String>> proxyGroups =
-    new HashMap<String, Collection<String>>();
-  private static Map<String, Collection<String>> proxyHosts =
-    new HashMap<String, Collection<String>>();
   private static volatile ImpersonationProvider sip ;
   /**
-   * reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts"
    * Returns an instance of ImpersonationProvider.
    * Looks up the configuration to see if there is custom class specified.
    * @param conf
    * @return ImpersonationProvider
    */
   private static ImpersonationProvider getInstance(Configuration conf) {
     Class<? extends ImpersonationProvider> clazz =
         conf.getClass(
             CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
             DefaultImpersonationProvider.class, ImpersonationProvider.class);
     return ReflectionUtils.newInstance(clazz, conf);
   }
   /**
    * refresh Impersonation rules
    */
   public static void refreshSuperUserGroupsConfiguration() {
     //load server side configuration;
@@ -64,70 +57,13 @@ public class ProxyUsers {
    * refresh configuration
    * @param conf
    */
-  public static synchronized void refreshSuperUserGroupsConfiguration(Configuration conf) {
-    // remove all existing stuff
-    proxyGroups.clear();
-    proxyHosts.clear();
-    proxyUsers.clear();
-    // get all the new keys for users
-    String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS;
-    Map<String,String> allMatchKeys = conf.getValByRegex(regex);
-    for(Entry<String, String> entry : allMatchKeys.entrySet()) {
-      Collection<String> users = StringUtils.getTrimmedStringCollection(entry.getValue());
-      proxyUsers.put(entry.getKey(), users);
-    }
-    // get all the new keys for groups
-    regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS;
-    allMatchKeys = conf.getValByRegex(regex);
-    for(Entry<String, String> entry : allMatchKeys.entrySet()) {
-      proxyGroups.put(entry.getKey(),
-        StringUtils.getTrimmedStringCollection(entry.getValue()));
-    }
-    // now hosts
-    regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_HOSTS;
-    allMatchKeys = conf.getValByRegex(regex);
-    for(Entry<String, String> entry : allMatchKeys.entrySet()) {
-      proxyHosts.put(entry.getKey(),
-        StringUtils.getTrimmedStringCollection(entry.getValue()));
-    }
-    init = true;
   public static void refreshSuperUserGroupsConfiguration(Configuration conf) {
     // sip is volatile. Any assignment to it as well as the object's state
     // will be visible to all the other threads.
     sip = getInstance(conf);
     ProxyServers.refresh(conf);
   }
-  /**
-   * Returns configuration key for effective users allowed for a superuser
-   *
-   * @param userName name of the superuser
-   * @return configuration key for superuser users
-   */
-  public static String getProxySuperuserUserConfKey(String userName) {
-    return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_USERS;
-  }
-  /**
-   * Returns configuration key for effective user groups allowed for a superuser
-   *
-   * @param userName name of the superuser
-   * @return configuration key for superuser groups
-   */
-  public static String getProxySuperuserGroupConfKey(String userName) {
-    return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_GROUPS;
-  }
-  /**
-   * Return configuration key for superuser ip addresses
-   *
-   * @param userName name of the superuser
-   * @return configuration key for superuser ip-addresses
-   */
-  public static String getProxySuperuserIpConfKey(String userName) {
-    return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_HOSTS;
-  }
   /**
    * Authorize the superuser which is doing doAs
    *
@@ -135,75 +71,14 @@ public class ProxyUsers {
    * @param remoteAddress the ip address of client
    * @throws AuthorizationException
    */
-  public static synchronized void authorize(UserGroupInformation user,
   public static void authorize(UserGroupInformation user,
       String remoteAddress) throws AuthorizationException {
     if (sip==null) {
-      if(!init) {
       // In a race situation, It is possible for multiple threads to satisfy this condition.
       // The last assignment will prevail.
       refreshSuperUserGroupsConfiguration();
     }
     sip.authorize(user, remoteAddress);
-    if (user.getRealUser() == null) {
-      return;
-    }
-    boolean userAuthorized = false;
-    boolean ipAuthorized = false;
-    UserGroupInformation superUser = user.getRealUser();
-    Collection<String> allowedUsers = proxyUsers.get(
-      getProxySuperuserUserConfKey(superUser.getShortUserName()));
-    if (isWildcardList(allowedUsers)) {
-      userAuthorized = true;
-    } else if (allowedUsers != null && !allowedUsers.isEmpty()) {
-      if (allowedUsers.contains(user.getShortUserName())) {
-        userAuthorized = true;
-      }
-    }
-    if (!userAuthorized) {
-      Collection<String> allowedUserGroups = proxyGroups.get(
-        getProxySuperuserGroupConfKey(superUser.getShortUserName()));
-      if (isWildcardList(allowedUserGroups)) {
-        userAuthorized = true;
-      } else if (allowedUserGroups != null && !allowedUserGroups.isEmpty()) {
-        for (String group : user.getGroupNames()) {
-          if (allowedUserGroups.contains(group)) {
-            userAuthorized = true;
-            break;
-          }
-        }
-      }
-      if (!userAuthorized) {
-        throw new AuthorizationException("User: " + superUser.getUserName()
-          + " is not allowed to impersonate " + user.getUserName());
-      }
-    }
-    Collection<String> ipList = proxyHosts.get(
-      getProxySuperuserIpConfKey(superUser.getShortUserName()));
-    if (isWildcardList(ipList)) {
-      ipAuthorized = true;
-    } else if (ipList != null && !ipList.isEmpty()) {
-      for (String allowedHost : ipList) {
-        InetAddress hostAddr;
-        try {
-          hostAddr = InetAddress.getByName(allowedHost);
-        } catch (UnknownHostException e) {
-          continue;
-        }
-        if (hostAddr.getHostAddress().equals(remoteAddress)) {
-          // Authorization is successful
-          ipAuthorized = true;
-        }
-      }
-    }
-    if (!ipAuthorized) {
-      throw new AuthorizationException("Unauthorized connection for super-user: "
-        + superUser.getUserName() + " from IP " + remoteAddress);
-    }
   }
   /**
@@ -215,33 +90,14 @@ public class ProxyUsers {
    * @deprecated use {@link #authorize(UserGroupInformation, String) instead.
    */
   @Deprecated
-  public static synchronized void authorize(UserGroupInformation user,
   public static void authorize(UserGroupInformation user,
       String remoteAddress, Configuration conf) throws AuthorizationException {
     authorize(user,remoteAddress);
   }
-  /**
-   * Return true if the configuration specifies the special configuration value
-   * "*", indicating that any group or host list is allowed to use this configuration.
-   */
-  private static boolean isWildcardList(Collection<String> list) {
-    return (list != null) &&
-      (list.size() == 1) &&
-      (list.contains("*"));
-  }
-  @VisibleForTesting
-  public static Map<String, Collection<String>> getProxyUsers() {
-    return proxyUsers;
-  }
-  @VisibleForTesting
-  public static Map<String, Collection<String>> getProxyGroups() {
-    return proxyGroups;
-  }
-  @VisibleForTesting
-  public static Map<String, Collection<String>> getProxyHosts() {
-    return proxyHosts;
-  }
   @VisibleForTesting
   public static DefaultImpersonationProvider getDefaultImpersonationProvider() {
     return ((DefaultImpersonationProvider)sip);
   }
 }
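For illustration only (not part of this commit): a sketch of how callers use the refactored ProxyUsers. The superuser "hive", the group "analysts" and the addresses are invented; the flow — refresh once, then authorize on each proxied call — is the one the new code above delegates to the pluggable provider.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
import org.apache.hadoop.security.authorize.ProxyUsers;

public class ProxyUsersFlowSketch {
  public static void main(String[] args) throws AuthorizationException {
    Configuration conf = new Configuration();
    // Hypothetical rule: superuser "hive" may impersonate members of "analysts"
    // from one host. The key helpers now live on DefaultImpersonationProvider.
    conf.set(DefaultImpersonationProvider.getProxySuperuserGroupConfKey("hive"),
        "analysts");
    conf.set(DefaultImpersonationProvider.getProxySuperuserIpConfKey("hive"),
        "10.0.0.2");

    // Builds the (pluggable) ImpersonationProvider and publishes it through the
    // volatile field, so the authorize path needs no synchronization.
    ProxyUsers.refreshSuperUserGroupsConfiguration(conf);

    UserGroupInformation realUser = UserGroupInformation.createRemoteUser("hive");
    UserGroupInformation proxyUser =
        UserGroupInformation.createProxyUserForTesting("alice", realUser,
            new String[] { "analysts" });
    ProxyUsers.authorize(proxyUser, "10.0.0.2");  // throws if the rules deny it
  }
}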

View File

@@ -714,6 +714,17 @@
<!-- Proxy Configuration -->

<property>
  <name>hadoop.security.impersonation.provider.class</name>
  <value></value>
  <description>A class which implements ImpersonationProvider interface, used to
       authorize whether one user can impersonate a specific user.
       If not specified, the DefaultImpersonationProvider will be used.
       If a class is specified, then that class will be used to determine
       the impersonation capability.
  </description>
</property>

<property>
  <name>hadoop.rpc.socket.factory.class.default</name>
  <value>org.apache.hadoop.net.StandardSocketFactory</value>
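For illustration only (not part of this commit): a small sketch of how this property is resolved, mirroring the getInstance() logic added to ProxyUsers above. A deployment would normally put the fully-qualified name of its own ImpersonationProvider here; the default class is reused below only so the sketch stays runnable.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
import org.apache.hadoop.security.authorize.ImpersonationProvider;
import org.apache.hadoop.util.ReflectionUtils;

public class ProviderResolutionSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();

    // Nothing configured (the shipped default leaves the value empty):
    // the default class is returned.
    Class<? extends ImpersonationProvider> clazz = conf.getClass(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
        DefaultImpersonationProvider.class, ImpersonationProvider.class);
    System.out.println(clazz.getName());  // ...DefaultImpersonationProvider

    // Explicit override; a real deployment would pass its own provider class.
    conf.setClass(
        CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
        DefaultImpersonationProvider.class, ImpersonationProvider.class);
    // Instantiated and configured the same way ProxyUsers.getInstance() does it.
    ImpersonationProvider provider = ReflectionUtils.newInstance(
        conf.getClass(
            CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
            DefaultImpersonationProvider.class, ImpersonationProvider.class),
        conf);
  }
}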

View File

@@ -35,6 +35,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -326,7 +327,7 @@ public class MiniRPCBenchmark {
     String shortUserName =
         UserGroupInformation.createRemoteUser(user).getShortUserName();
     try {
-      conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(shortUserName),
       conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(shortUserName),
           GROUP_NAME_1);
       configureSuperUserIPAddresses(conf, shortUserName);
       // start the server
@ -410,7 +411,7 @@ public class MiniRPCBenchmark {
     }
     builder.append("127.0.1.1,");
     builder.append(InetAddress.getLocalHost().getCanonicalHostName());
-    conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName),
     conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName),
         builder.toString());
   }
 }

View File

@@ -35,6 +35,7 @@ import org.apache.hadoop.ipc.Server;
 import org.apache.hadoop.ipc.VersionedProtocol;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
@@ -100,7 +101,7 @@ public class TestDoAsEffectiveUser {
     builder.append("127.0.1.1,");
     builder.append(InetAddress.getLocalHost().getCanonicalHostName());
     LOG.info("Local Ip addresses: "+builder.toString());
-    conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName),
     conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName),
         builder.toString());
   }
@@ -180,7 +181,7 @@ public class TestDoAsEffectiveUser {
   @Test(timeout=4000)
   public void testRealUserSetup() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(ProxyUsers
     conf.setStrings(DefaultImpersonationProvider
         .getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1");
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
@@ -213,7 +214,7 @@ public class TestDoAsEffectiveUser {
   public void testRealUserAuthorizationSuccess() throws IOException {
     final Configuration conf = new Configuration();
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
-    conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
     conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -247,9 +248,9 @@ public class TestDoAsEffectiveUser {
   @Test
   public void testRealUserIPAuthorizationFailure() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_SHORT_NAME),
     conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_SHORT_NAME),
         "20.20.20.20"); //Authorized IP address
-    conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
     conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -292,7 +293,7 @@ public class TestDoAsEffectiveUser {
   @Test
   public void testRealUserIPNotSpecified() throws IOException {
     final Configuration conf = new Configuration();
-    conf.setStrings(ProxyUsers
     conf.setStrings(DefaultImpersonationProvider
         .getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)
@@ -376,7 +377,7 @@ public class TestDoAsEffectiveUser {
   public void testRealUserGroupAuthorizationFailure() throws IOException {
     final Configuration conf = new Configuration();
     configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME);
-    conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
     conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME),
         "group3");
     Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class)
         .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0)

View File

@@ -17,34 +17,126 @@
  */
 package org.apache.hadoop.security.authorize;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 import java.io.IOException;
 import java.util.Arrays;
 import java.util.Collection;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.security.Groups;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.util.NativeCodeLoader;
 import org.apache.hadoop.util.StringUtils;
 import org.junit.Test;
-import static org.junit.Assert.*;
 public class TestProxyUsers {
   private static final Log LOG =
     LogFactory.getLog(TestProxyUsers.class);
   private static final String REAL_USER_NAME = "proxier";
   private static final String PROXY_USER_NAME = "proxied_user";
   private static final String AUTHORIZED_PROXY_USER_NAME = "authorized_proxied_user";
   private static final String[] GROUP_NAMES =
     new String[] { "foo_group" };
   private static final String[] NETGROUP_NAMES =
     new String[] { "@foo_group" };
   private static final String[] OTHER_GROUP_NAMES =
     new String[] { "bar_group" };
   private static final String[] SUDO_GROUP_NAMES =
     new String[] { "sudo_proxied_user" };
   private static final String PROXY_IP = "1.2.3.4";
/**
* Test the netgroups (groups in ACL rules that start with @)
*
* This is a manual test because it requires:
* - host setup
* - native code compiled
* - specify the group mapping class
*
* Host setup:
*
* /etc/nsswitch.conf should have a line like this:
* netgroup: files
*
* /etc/netgroup should be (the whole file):
* foo_group (,proxied_user,)
*
* To run this test:
*
* export JAVA_HOME='path/to/java'
* mvn test \
* -Dtest=TestProxyUsers \
* -DTestProxyUsersGroupMapping=$className \
*
* where $className is one of the classes that provide group
* mapping services, i.e. classes that implement
* GroupMappingServiceProvider interface, at this time:
* - org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping
* - org.apache.hadoop.security.ShellBasedUnixGroupsNetgroupMapping
*
*/
@Test
public void testNetgroups () throws IOException{
if(!NativeCodeLoader.isNativeCodeLoaded()) {
LOG.info("Not testing netgroups, " +
"this test only runs when native code is compiled");
return;
}
String groupMappingClassName =
System.getProperty("TestProxyUsersGroupMapping");
if(groupMappingClassName == null) {
LOG.info("Not testing netgroups, no group mapping class specified, " +
"use -DTestProxyUsersGroupMapping=$className to specify " +
"group mapping class (must implement GroupMappingServiceProvider " +
"interface and support netgroups)");
return;
}
LOG.info("Testing netgroups using: " + groupMappingClassName);
Configuration conf = new Configuration();
conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_GROUP_MAPPING,
groupMappingClassName);
conf.set(
DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
StringUtils.join(",", Arrays.asList(NETGROUP_NAMES)));
conf.set(
DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
PROXY_IP);
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
Groups groups = Groups.getUserToGroupsMappingService(conf);
// try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createRemoteUser(REAL_USER_NAME);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, groups.getGroups(PROXY_USER_NAME).toArray(
new String[groups.getGroups(PROXY_USER_NAME).size()]));
assertAuthorized(proxyUserUgi, PROXY_IP);
}
   @Test
   public void testProxyUsers() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -75,11 +167,11 @@ public class TestProxyUsers {
   public void testProxyUsersWithUserConf() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserUserConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -109,10 +201,10 @@ public class TestProxyUsers {
   public void testWildcardGroup() {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       "*");
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -143,10 +235,10 @@ public class TestProxyUsers {
   public void testWildcardUser() {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserUserConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME),
       "*");
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -177,10 +269,10 @@ public class TestProxyUsers {
   public void testWildcardIP() {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       "*");
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
@@ -208,15 +300,16 @@ public class TestProxyUsers {
   public void testWithDuplicateProxyGroups() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES,GROUP_NAMES)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       PROXY_IP);
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
-    Collection<String> groupsToBeProxied = ProxyUsers.getProxyGroups().get(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME));
     Collection<String> groupsToBeProxied =
       ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get(
         DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME));
     assertEquals (1,groupsToBeProxied.size());
   }
@@ -225,18 +318,51 @@ public class TestProxyUsers {
   public void testWithDuplicateProxyHosts() throws Exception {
     Configuration conf = new Configuration();
     conf.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(GROUP_NAMES)));
     conf.set(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME),
       StringUtils.join(",", Arrays.asList(PROXY_IP,PROXY_IP)));
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
-    Collection<String> hosts = ProxyUsers.getProxyHosts().get(
-      ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME));
     Collection<String> hosts =
       ProxyUsers.getDefaultImpersonationProvider().getProxyHosts().get(
         DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME));
     assertEquals (1,hosts.size());
   }
@Test
public void testProxyUsersWithProviderOverride() throws Exception {
Configuration conf = new Configuration();
conf.set(
CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS,
"org.apache.hadoop.security.authorize.TestProxyUsers$TestDummyImpersonationProvider");
ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
// First try proxying a group that's allowed
UserGroupInformation realUserUgi = UserGroupInformation
.createUserForTesting(REAL_USER_NAME, SUDO_GROUP_NAMES);
UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertAuthorized(proxyUserUgi, "1.2.3.5");
// Now try proxying a group that's not allowed
realUserUgi = UserGroupInformation
.createUserForTesting(REAL_USER_NAME, GROUP_NAMES);
proxyUserUgi = UserGroupInformation.createProxyUserForTesting(
PROXY_USER_NAME, realUserUgi, GROUP_NAMES);
// From good IP
assertNotAuthorized(proxyUserUgi, "1.2.3.4");
// From bad IP
assertNotAuthorized(proxyUserUgi, "1.2.3.5");
}
   private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) {
     try {
@@ -254,4 +380,32 @@ public class TestProxyUsers {
       fail("Did not allow authorization of " + proxyUgi + " from " + host);
     }
   }
static class TestDummyImpersonationProvider implements ImpersonationProvider {
/**
* Authorize a user (superuser) to impersonate another user (user1) if the
* superuser belongs to the group "sudo_user1" .
*/
public void authorize(UserGroupInformation user,
String remoteAddress) throws AuthorizationException{
UserGroupInformation superUser = user.getRealUser();
String sudoGroupName = "sudo_" + user.getShortUserName();
if (!Arrays.asList(superUser.getGroupNames()).contains(sudoGroupName)){
throw new AuthorizationException("User: " + superUser.getUserName()
+ " is not allowed to impersonate " + user.getUserName());
}
}
@Override
public void setConf(Configuration conf) {
}
@Override
public Configuration getConf() {
return null;
}
}
 }

View File

@@ -22,7 +22,6 @@ import static org.junit.Assert.assertTrue;
 import java.io.IOException;
 import java.net.InetAddress;
-import java.util.Arrays;
 import java.util.List;
 import org.apache.hadoop.conf.Configuration;
@@ -41,9 +40,8 @@ import org.apache.hadoop.nfs.nfs3.response.READDIRPLUS3Response;
 import org.apache.hadoop.nfs.nfs3.response.READDIRPLUS3Response.EntryPlus3;
 import org.apache.hadoop.oncrpc.XDR;
 import org.apache.hadoop.oncrpc.security.SecurityHandler;
-import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
-import org.apache.hadoop.util.StringUtils;
 import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
@@ -67,10 +65,10 @@ public class TestReaddir {
   public static void setup() throws Exception {
     String currentUser = System.getProperty("user.name");
     config.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(currentUser),
       DefaultImpersonationProvider.getProxySuperuserGroupConfKey(currentUser),
       "*");
     config.set(
-      ProxyUsers.getProxySuperuserIpConfKey(currentUser),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(currentUser),
       "*");
     ProxyUsers.refreshSuperUserGroupsConfiguration(config);
     cluster = new MiniDFSCluster.Builder(config).numDataNodes(1).build();

View File

@@ -50,6 +50,7 @@ import org.apache.hadoop.nfs.nfs3.response.CREATE3Response;
 import org.apache.hadoop.nfs.nfs3.response.READ3Response;
 import org.apache.hadoop.oncrpc.XDR;
 import org.apache.hadoop.oncrpc.security.SecurityHandler;
 import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.jboss.netty.channel.Channel;
 import org.junit.Assert;
@@ -288,10 +289,10 @@ public class TestWrites {
         System.getProperty("user.name"));
     String currentUser = System.getProperty("user.name");
     config.set(
-      ProxyUsers.getProxySuperuserGroupConfKey(currentUser),
       DefaultImpersonationProvider.getProxySuperuserGroupConfKey(currentUser),
       "*");
     config.set(
-      ProxyUsers.getProxySuperuserIpConfKey(currentUser),
       DefaultImpersonationProvider.getProxySuperuserIpConfKey(currentUser),
       "*");
     ProxyUsers.refreshSuperUserGroupsConfiguration(config);

View File

@@ -45,6 +45,7 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.WebHdfsTestUtil;
 import org.apache.hadoop.security.TestDoAsEffectiveUser;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
 import org.junit.AfterClass;
@@ -88,7 +89,7 @@ public class TestDelegationTokenForProxyUser {
     builder.append("127.0.1.1,");
     builder.append(InetAddress.getLocalHost().getCanonicalHostName());
     LOG.info("Local Ip addresses: " + builder.toString());
-    conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName),
     conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName),
         builder.toString());
   }
@@ -100,7 +101,7 @@ public class TestDelegationTokenForProxyUser {
         DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_KEY, 10000);
     config.setLong(
         DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_RENEW_INTERVAL_KEY, 5000);
-    config.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER),
     config.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER),
         "group1");
     config.setBoolean(
         DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true);

View File

@@ -58,6 +58,7 @@ import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyServers;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.token.Token;
@@ -327,8 +328,8 @@ public class TestJspHelper {
     String user = "TheNurse";
     conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos");
-    conf.set(ProxyUsers.getProxySuperuserGroupConfKey(realUser), "*");
     conf.set(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(realUser), "*");
-    conf.set(ProxyUsers.getProxySuperuserIpConfKey(realUser), "*");
     conf.set(DefaultImpersonationProvider.getProxySuperuserIpConfKey(realUser), "*");
     ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
     UserGroupInformation.setConfiguration(conf);
     UserGroupInformation ugi;

View File

@@ -39,8 +39,8 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.net.NetUtils;
-import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.ProxyServers;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.junit.Before;
 import org.junit.Test;

View File

@@ -41,6 +41,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.tools.DFSAdmin;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.junit.After;
 import org.junit.Before;
@@ -150,8 +151,8 @@ public class TestRefreshUserMappings {
     final String [] GROUP_NAMES2 = new String [] {"gr3" , "gr4"};
     //keys in conf
-    String userKeyGroups = ProxyUsers.getProxySuperuserGroupConfKey(SUPER_USER);
     String userKeyGroups = DefaultImpersonationProvider.getProxySuperuserGroupConfKey(SUPER_USER);
-    String userKeyHosts = ProxyUsers.getProxySuperuserIpConfKey (SUPER_USER);
     String userKeyHosts = DefaultImpersonationProvider.getProxySuperuserIpConfKey (SUPER_USER);
     config.set(userKeyGroups, "gr3,gr4,gr5"); // superuser can proxy for this group
     config.set(userKeyHosts,"127.0.0.1");

View File

@@ -348,14 +348,14 @@ public class TestRMAdminService {
     rm.adminService.refreshSuperUserGroupsConfiguration(
         RefreshSuperUserGroupsConfigurationRequest.newInstance());
-    Assert.assertTrue(ProxyUsers.getProxyGroups()
     Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyGroups()
         .get("hadoop.proxyuser.test.groups").size() == 1);
-    Assert.assertTrue(ProxyUsers.getProxyGroups()
     Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyGroups()
         .get("hadoop.proxyuser.test.groups").contains("test_groups"));
-    Assert.assertTrue(ProxyUsers.getProxyHosts()
     Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyHosts()
         .get("hadoop.proxyuser.test.hosts").size() == 1);
-    Assert.assertTrue(ProxyUsers.getProxyHosts()
     Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyHosts()
         .get("hadoop.proxyuser.test.hosts").contains("test_hosts"));
   }
@@ -708,14 +708,14 @@ public class TestRMAdminService {
         aclsString);
     // verify ProxyUsers and ProxyHosts
-    Assert.assertTrue(ProxyUsers.getProxyGroups()
     Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyGroups()
         .get("hadoop.proxyuser.test.groups").size() == 1);
-    Assert.assertTrue(ProxyUsers.getProxyGroups()
     Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyGroups()
         .get("hadoop.proxyuser.test.groups").contains("test_groups"));
-    Assert.assertTrue(ProxyUsers.getProxyHosts()
     Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyHosts()
         .get("hadoop.proxyuser.test.hosts").size() == 1);
-    Assert.assertTrue(ProxyUsers.getProxyHosts()
     Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyHosts()
         .get("hadoop.proxyuser.test.hosts").contains("test_hosts"));
     // verify UserToGroupsMappings