From d1b7433a62cfbb49c93490243a218ff7c987e937 Mon Sep 17 00:00:00 2001
From: Arpit Agarwal
Date: Thu, 29 May 2014 23:38:07 +0000
Subject: [PATCH] HADOOP-9968 and HADOOP-10448. Merging r1570934 and 1598396 from trunk to branch-2.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1598440 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt | 6 +
 .../fs/CommonConfigurationKeysPublic.java | 3 +
 .../DefaultImpersonationProvider.java | 210 ++++++++++++++++++
 .../authorize/ImpersonationProvider.java | 34 +++
 .../hadoop/security/authorize/ProxyUsers.java | 210 +++---------------
 .../src/main/resources/core-default.xml | 11 +
 .../apache/hadoop/ipc/MiniRPCBenchmark.java | 5 +-
 .../security/TestDoAsEffectiveUser.java | 15 +-
 .../security/authorize/TestProxyUsers.java | 200 +++++++++++++++--
 .../apache/hadoop/hdfs/nfs/TestReaddir.java | 8 +-
 .../hadoop/hdfs/nfs/nfs3/TestWrites.java | 5 +-
 .../TestDelegationTokenForProxyUser.java | 5 +-
 .../hdfs/server/common/TestJspHelper.java | 5 +-
 .../hdfs/server/namenode/TestAuditLogger.java | 2 +-
 .../security/TestRefreshUserMappings.java | 5 +-
 .../resourcemanager/TestRMAdminService.java | 16 +-
 16 files changed, 509 insertions(+), 231 deletions(-)
 create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java
 create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index ac687e344f1..6df3a5badf4 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -60,6 +60,12 @@ Release 2.5.0 - UNRELEASED
     HADOOP-10566. Refactor proxyservers out of ProxyUsers. (Benoy Antony via
     Arpit Agarwal)
 
+    HADOOP-9968. ProxyUsers does not work with NetGroups. (Benoy Antony via
+    Devaraj Das)
+
+    HADOOP-10448. Support pluggable mechanism to specify proxy user settings.
+ (Benoy Antony via Arpit Agarwal) + OPTIMIZATIONS BUG FIXES diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java index f0ae6d7a489..1cad0821faa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java @@ -290,5 +290,8 @@ public class CommonConfigurationKeysPublic { /** Class to override Sasl Properties for a connection */ public static final String HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS = "hadoop.security.saslproperties.resolver.class"; + /** Class to override Impersonation provider */ + public static final String HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS = + "hadoop.security.impersonation.provider.class"; } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java new file mode 100644 index 00000000000..46b4a928e85 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/DefaultImpersonationProvider.java @@ -0,0 +1,210 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.security.authorize; + +import java.net.InetAddress; +import java.net.UnknownHostException; +import java.util.ArrayList; +import java.util.Collection; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.security.Groups; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.util.StringUtils; + +import com.google.common.annotations.VisibleForTesting; + +public class DefaultImpersonationProvider implements ImpersonationProvider { + private static final String CONF_HOSTS = ".hosts"; + private static final String CONF_USERS = ".users"; + private static final String CONF_GROUPS = ".groups"; + private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser."; + private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\."; + // list of users, groups and hosts per proxyuser + private Map> proxyUsers = + new HashMap>(); + private Map> proxyGroups = + new HashMap>(); + private Map> proxyHosts = + new HashMap>(); + private Configuration conf; + + @Override + public void setConf(Configuration conf) { + this.conf = conf; + + // get all the new keys for users + String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS; + Map allMatchKeys = conf.getValByRegex(regex); + for(Entry entry : allMatchKeys.entrySet()) { + Collection users = StringUtils.getTrimmedStringCollection(entry.getValue()); + proxyUsers.put(entry.getKey(), users); + } + + // get all the new keys for groups + regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS; + allMatchKeys = conf.getValByRegex(regex); + for(Entry entry : allMatchKeys.entrySet()) { + Collection groups = StringUtils.getTrimmedStringCollection(entry.getValue()); + proxyGroups.put(entry.getKey(), groups); + //cache the groups. 
This is needed for NetGroups + Groups.getUserToGroupsMappingService(conf).cacheGroupsAdd( + new ArrayList(groups)); + } + + // now hosts + regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_HOSTS; + allMatchKeys = conf.getValByRegex(regex); + for(Entry entry : allMatchKeys.entrySet()) { + proxyHosts.put(entry.getKey(), + StringUtils.getTrimmedStringCollection(entry.getValue())); + } + } + + @Override + public Configuration getConf() { + return conf; + } + + @Override + public void authorize(UserGroupInformation user, + String remoteAddress) throws AuthorizationException { + + if (user.getRealUser() == null) { + return; + } + boolean userAuthorized = false; + boolean ipAuthorized = false; + UserGroupInformation superUser = user.getRealUser(); + + Collection allowedUsers = proxyUsers.get( + getProxySuperuserUserConfKey(superUser.getShortUserName())); + + if (isWildcardList(allowedUsers)) { + userAuthorized = true; + } else if (allowedUsers != null && !allowedUsers.isEmpty()) { + if (allowedUsers.contains(user.getShortUserName())) { + userAuthorized = true; + } + } + + if (!userAuthorized){ + Collection allowedUserGroups = proxyGroups.get( + getProxySuperuserGroupConfKey(superUser.getShortUserName())); + + if (isWildcardList(allowedUserGroups)) { + userAuthorized = true; + } else if (allowedUserGroups != null && !allowedUserGroups.isEmpty()) { + for (String group : user.getGroupNames()) { + if (allowedUserGroups.contains(group)) { + userAuthorized = true; + break; + } + } + } + + if (!userAuthorized) { + throw new AuthorizationException("User: " + superUser.getUserName() + + " is not allowed to impersonate " + user.getUserName()); + } + } + + Collection ipList = proxyHosts.get( + getProxySuperuserIpConfKey(superUser.getShortUserName())); + + if (isWildcardList(ipList)) { + ipAuthorized = true; + } else if (ipList != null && !ipList.isEmpty()) { + for (String allowedHost : ipList) { + InetAddress hostAddr; + try { + hostAddr = InetAddress.getByName(allowedHost); + } catch (UnknownHostException e) { + continue; + } + if (hostAddr.getHostAddress().equals(remoteAddress)) { + // Authorization is successful + ipAuthorized = true; + } + } + } + if(!ipAuthorized) { + throw new AuthorizationException("Unauthorized connection for super-user: " + + superUser.getUserName() + " from IP " + remoteAddress); + } + } + + /** + * Return true if the configuration specifies the special configuration value + * "*", indicating that any group or host list is allowed to use this configuration. 
+ */ + private boolean isWildcardList(Collection list) { + return (list != null) && + (list.size() == 1) && + (list.contains("*")); + } + + /** + * Returns configuration key for effective usergroups allowed for a superuser + * + * @param userName name of the superuser + * @return configuration key for superuser usergroups + */ + public static String getProxySuperuserUserConfKey(String userName) { + return CONF_HADOOP_PROXYUSER+userName+CONF_USERS; + } + + /** + * Returns configuration key for effective groups allowed for a superuser + * + * @param userName name of the superuser + * @return configuration key for superuser groups + */ + public static String getProxySuperuserGroupConfKey(String userName) { + return CONF_HADOOP_PROXYUSER+userName+CONF_GROUPS; + } + + /** + * Return configuration key for superuser ip addresses + * + * @param userName name of the superuser + * @return configuration key for superuser ip-addresses + */ + public static String getProxySuperuserIpConfKey(String userName) { + return CONF_HADOOP_PROXYUSER+userName+CONF_HOSTS; + } + + @VisibleForTesting + public Map> getProxyUsers() { + return proxyUsers; + } + + @VisibleForTesting + public Map> getProxyGroups() { + return proxyGroups; + } + + @VisibleForTesting + public Map> getProxyHosts() { + return proxyHosts; + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java new file mode 100644 index 00000000000..6e7a39565df --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ImpersonationProvider.java @@ -0,0 +1,34 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.security.authorize; + +import org.apache.hadoop.conf.Configurable; +import org.apache.hadoop.security.UserGroupInformation; + +public interface ImpersonationProvider extends Configurable { + /** + * Authorize the superuser which is doing doAs + * + * @param user ugi of the effective or proxy user which contains a real user + * @param remoteAddress the ip address of client + * @throws AuthorizationException + */ + public void authorize(UserGroupInformation user, String remoteAddress) + throws AuthorizationException; +} \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java index 9fa232b184f..e221ae390f2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/authorize/ProxyUsers.java @@ -18,42 +18,35 @@ package org.apache.hadoop.security.authorize; -import java.net.InetAddress; -import java.net.InetSocketAddress; -import java.net.UnknownHostException; -import java.util.Collection; -import java.util.HashMap; -import java.util.HashSet; -import java.util.Map; -import java.util.Map.Entry; - import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.util.ReflectionUtils; import com.google.common.annotations.VisibleForTesting; @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "HBase", "Hive"}) public class ProxyUsers { - private static final String CONF_HOSTS = ".hosts"; - private static final String CONF_USERS = ".users"; - private static final String CONF_GROUPS = ".groups"; - private static final String CONF_HADOOP_PROXYUSER = "hadoop.proxyuser."; - private static final String CONF_HADOOP_PROXYUSER_RE = "hadoop\\.proxyuser\\."; - - private static boolean init = false; - //list of users, groups and hosts per proxyuser - private static Map> proxyUsers = - new HashMap>(); - private static Map> proxyGroups = - new HashMap>(); - private static Map> proxyHosts = - new HashMap>(); + private static volatile ImpersonationProvider sip ; /** - * reread the conf and get new values for "hadoop.proxyuser.*.groups/users/hosts" + * Returns an instance of ImpersonationProvider. + * Looks up the configuration to see if there is custom class specified. 
+ * @param conf + * @return ImpersonationProvider + */ + private static ImpersonationProvider getInstance(Configuration conf) { + Class clazz = + conf.getClass( + CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS, + DefaultImpersonationProvider.class, ImpersonationProvider.class); + return ReflectionUtils.newInstance(clazz, conf); + } + + /** + * refresh Impersonation rules */ public static void refreshSuperUserGroupsConfiguration() { //load server side configuration; @@ -64,70 +57,13 @@ public class ProxyUsers { * refresh configuration * @param conf */ - public static synchronized void refreshSuperUserGroupsConfiguration(Configuration conf) { - - // remove all existing stuff - proxyGroups.clear(); - proxyHosts.clear(); - proxyUsers.clear(); - - // get all the new keys for users - String regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_USERS; - Map allMatchKeys = conf.getValByRegex(regex); - for(Entry entry : allMatchKeys.entrySet()) { - Collection users = StringUtils.getTrimmedStringCollection(entry.getValue()); - proxyUsers.put(entry.getKey(), users); - } - - // get all the new keys for groups - regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_GROUPS; - allMatchKeys = conf.getValByRegex(regex); - for(Entry entry : allMatchKeys.entrySet()) { - proxyGroups.put(entry.getKey(), - StringUtils.getTrimmedStringCollection(entry.getValue())); - } - - // now hosts - regex = CONF_HADOOP_PROXYUSER_RE+"[^.]*\\"+CONF_HOSTS; - allMatchKeys = conf.getValByRegex(regex); - for(Entry entry : allMatchKeys.entrySet()) { - proxyHosts.put(entry.getKey(), - StringUtils.getTrimmedStringCollection(entry.getValue())); - } - init = true; + public static void refreshSuperUserGroupsConfiguration(Configuration conf) { + // sip is volatile. Any assignment to it as well as the object's state + // will be visible to all the other threads. + sip = getInstance(conf); ProxyServers.refresh(conf); } - /** - * Returns configuration key for effective users allowed for a superuser - * - * @param userName name of the superuser - * @return configuration key for superuser users - */ - public static String getProxySuperuserUserConfKey(String userName) { - return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_USERS; - } - - /** - * Returns configuration key for effective user groups allowed for a superuser - * - * @param userName name of the superuser - * @return configuration key for superuser groups - */ - public static String getProxySuperuserGroupConfKey(String userName) { - return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_GROUPS; - } - - /** - * Return configuration key for superuser ip addresses - * - * @param userName name of the superuser - * @return configuration key for superuser ip-addresses - */ - public static String getProxySuperuserIpConfKey(String userName) { - return ProxyUsers.CONF_HADOOP_PROXYUSER+userName+ProxyUsers.CONF_HOSTS; - } - /** * Authorize the superuser which is doing doAs * @@ -135,75 +71,14 @@ public class ProxyUsers { * @param remoteAddress the ip address of client * @throws AuthorizationException */ - public static synchronized void authorize(UserGroupInformation user, + public static void authorize(UserGroupInformation user, String remoteAddress) throws AuthorizationException { - - if(!init) { + if (sip==null) { + // In a race situation, It is possible for multiple threads to satisfy this condition. + // The last assignment will prevail. 
refreshSuperUserGroupsConfiguration(); } - - if (user.getRealUser() == null) { - return; - } - boolean userAuthorized = false; - boolean ipAuthorized = false; - UserGroupInformation superUser = user.getRealUser(); - - Collection allowedUsers = proxyUsers.get( - getProxySuperuserUserConfKey(superUser.getShortUserName())); - - if (isWildcardList(allowedUsers)) { - userAuthorized = true; - } else if (allowedUsers != null && !allowedUsers.isEmpty()) { - if (allowedUsers.contains(user.getShortUserName())) { - userAuthorized = true; - } - } - - if (!userAuthorized) { - Collection allowedUserGroups = proxyGroups.get( - getProxySuperuserGroupConfKey(superUser.getShortUserName())); - - if (isWildcardList(allowedUserGroups)) { - userAuthorized = true; - } else if (allowedUserGroups != null && !allowedUserGroups.isEmpty()) { - for (String group : user.getGroupNames()) { - if (allowedUserGroups.contains(group)) { - userAuthorized = true; - break; - } - } - } - - if (!userAuthorized) { - throw new AuthorizationException("User: " + superUser.getUserName() - + " is not allowed to impersonate " + user.getUserName()); - } - } - - Collection ipList = proxyHosts.get( - getProxySuperuserIpConfKey(superUser.getShortUserName())); - - if (isWildcardList(ipList)) { - ipAuthorized = true; - } else if (ipList != null && !ipList.isEmpty()) { - for (String allowedHost : ipList) { - InetAddress hostAddr; - try { - hostAddr = InetAddress.getByName(allowedHost); - } catch (UnknownHostException e) { - continue; - } - if (hostAddr.getHostAddress().equals(remoteAddress)) { - // Authorization is successful - ipAuthorized = true; - } - } - } - if (!ipAuthorized) { - throw new AuthorizationException("Unauthorized connection for super-user: " - + superUser.getUserName() + " from IP " + remoteAddress); - } + sip.authorize(user, remoteAddress); } /** @@ -215,33 +90,14 @@ public class ProxyUsers { * @deprecated use {@link #authorize(UserGroupInformation, String) instead. */ @Deprecated - public static synchronized void authorize(UserGroupInformation user, + public static void authorize(UserGroupInformation user, String remoteAddress, Configuration conf) throws AuthorizationException { authorize(user,remoteAddress); } - - /** - * Return true if the configuration specifies the special configuration value - * "*", indicating that any group or host list is allowed to use this configuration. - */ - private static boolean isWildcardList(Collection list) { - return (list != null) && - (list.size() == 1) && - (list.contains("*")); - } - - @VisibleForTesting - public static Map> getProxyUsers() { - return proxyUsers; - } - - @VisibleForTesting - public static Map> getProxyGroups() { - return proxyGroups; - } - - @VisibleForTesting - public static Map> getProxyHosts() { - return proxyHosts; + + @VisibleForTesting + public static DefaultImpersonationProvider getDefaultImpersonationProvider() { + return ((DefaultImpersonationProvider)sip); } + } diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml index fb05dc93421..3445266d693 100644 --- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml +++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml @@ -714,6 +714,17 @@ + + hadoop.security.impersonation.provider.class + + A class which implements ImpersonationProvider interface, used to + authorize whether one user can impersonate a specific user. 
+ If not specified, the DefaultImpersonationProvider will be used. + If a class is specified, then that class will be used to determine + the impersonation capability. + + + hadoop.rpc.socket.factory.class.default org.apache.hadoop.net.StandardSocketFactory diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java index 576ce767487..0bc0b047dad 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/MiniRPCBenchmark.java @@ -35,6 +35,7 @@ import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.KerberosInfo; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.DefaultImpersonationProvider; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenInfo; @@ -326,7 +327,7 @@ public class MiniRPCBenchmark { String shortUserName = UserGroupInformation.createRemoteUser(user).getShortUserName(); try { - conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(shortUserName), + conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(shortUserName), GROUP_NAME_1); configureSuperUserIPAddresses(conf, shortUserName); // start the server @@ -410,7 +411,7 @@ public class MiniRPCBenchmark { } builder.append("127.0.1.1,"); builder.append(InetAddress.getLocalHost().getCanonicalHostName()); - conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName), + conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName), builder.toString()); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java index 6626e8b6348..42e7881d3a9 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java @@ -35,6 +35,7 @@ import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.VersionedProtocol; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; +import org.apache.hadoop.security.authorize.DefaultImpersonationProvider; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenInfo; @@ -100,7 +101,7 @@ public class TestDoAsEffectiveUser { builder.append("127.0.1.1,"); builder.append(InetAddress.getLocalHost().getCanonicalHostName()); LOG.info("Local Ip addresses: "+builder.toString()); - conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName), + conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName), builder.toString()); } @@ -180,7 +181,7 @@ public class TestDoAsEffectiveUser { @Test(timeout=4000) public void testRealUserSetup() throws IOException { final Configuration conf = new Configuration(); - conf.setStrings(ProxyUsers + conf.setStrings(DefaultImpersonationProvider .getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1"); 
configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME); Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class) @@ -213,7 +214,7 @@ public class TestDoAsEffectiveUser { public void testRealUserAuthorizationSuccess() throws IOException { final Configuration conf = new Configuration(); configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME); - conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), + conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1"); Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class) .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0) @@ -247,9 +248,9 @@ public class TestDoAsEffectiveUser { @Test public void testRealUserIPAuthorizationFailure() throws IOException { final Configuration conf = new Configuration(); - conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_SHORT_NAME), + conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_SHORT_NAME), "20.20.20.20"); //Authorized IP address - conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), + conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1"); Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class) .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0) @@ -292,7 +293,7 @@ public class TestDoAsEffectiveUser { @Test public void testRealUserIPNotSpecified() throws IOException { final Configuration conf = new Configuration(); - conf.setStrings(ProxyUsers + conf.setStrings(DefaultImpersonationProvider .getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1"); Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class) .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0) @@ -376,7 +377,7 @@ public class TestDoAsEffectiveUser { public void testRealUserGroupAuthorizationFailure() throws IOException { final Configuration conf = new Configuration(); configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME); - conf.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), + conf.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group3"); Server server = new RPC.Builder(conf).setProtocol(TestProtocol.class) .setInstance(new TestImpl()).setBindAddress(ADDRESS).setPort(0) diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java index 06504807726..dcb3e7c823c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/authorize/TestProxyUsers.java @@ -17,34 +17,126 @@ */ package org.apache.hadoop.security.authorize; +import static org.junit.Assert.assertEquals; +import static org.junit.Assert.fail; + +import java.io.IOException; import java.util.Arrays; import java.util.Collection; +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.util.StringUtils; +import org.apache.hadoop.fs.CommonConfigurationKeysPublic; +import org.apache.hadoop.security.Groups; import org.apache.hadoop.security.UserGroupInformation; - +import org.apache.hadoop.util.NativeCodeLoader; +import 
org.apache.hadoop.util.StringUtils; import org.junit.Test; -import static org.junit.Assert.*; + public class TestProxyUsers { + private static final Log LOG = + LogFactory.getLog(TestProxyUsers.class); private static final String REAL_USER_NAME = "proxier"; private static final String PROXY_USER_NAME = "proxied_user"; private static final String AUTHORIZED_PROXY_USER_NAME = "authorized_proxied_user"; private static final String[] GROUP_NAMES = new String[] { "foo_group" }; + private static final String[] NETGROUP_NAMES = + new String[] { "@foo_group" }; private static final String[] OTHER_GROUP_NAMES = new String[] { "bar_group" }; + private static final String[] SUDO_GROUP_NAMES = + new String[] { "sudo_proxied_user" }; private static final String PROXY_IP = "1.2.3.4"; + /** + * Test the netgroups (groups in ACL rules that start with @) + * + * This is a manual test because it requires: + * - host setup + * - native code compiled + * - specify the group mapping class + * + * Host setup: + * + * /etc/nsswitch.conf should have a line like this: + * netgroup: files + * + * /etc/netgroup should be (the whole file): + * foo_group (,proxied_user,) + * + * To run this test: + * + * export JAVA_HOME='path/to/java' + * mvn test \ + * -Dtest=TestProxyUsers \ + * -DTestProxyUsersGroupMapping=$className \ + * + * where $className is one of the classes that provide group + * mapping services, i.e. classes that implement + * GroupMappingServiceProvider interface, at this time: + * - org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping + * - org.apache.hadoop.security.ShellBasedUnixGroupsNetgroupMapping + * + */ + + @Test + public void testNetgroups () throws IOException{ + + if(!NativeCodeLoader.isNativeCodeLoaded()) { + LOG.info("Not testing netgroups, " + + "this test only runs when native code is compiled"); + return; + } + + String groupMappingClassName = + System.getProperty("TestProxyUsersGroupMapping"); + + if(groupMappingClassName == null) { + LOG.info("Not testing netgroups, no group mapping class specified, " + + "use -DTestProxyUsersGroupMapping=$className to specify " + + "group mapping class (must implement GroupMappingServiceProvider " + + "interface and support netgroups)"); + return; + } + + LOG.info("Testing netgroups using: " + groupMappingClassName); + + Configuration conf = new Configuration(); + conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_GROUP_MAPPING, + groupMappingClassName); + + conf.set( + DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME), + StringUtils.join(",", Arrays.asList(NETGROUP_NAMES))); + conf.set( + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME), + PROXY_IP); + + ProxyUsers.refreshSuperUserGroupsConfiguration(conf); + Groups groups = Groups.getUserToGroupsMappingService(conf); + + // try proxying a group that's allowed + UserGroupInformation realUserUgi = UserGroupInformation + .createRemoteUser(REAL_USER_NAME); + + UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting( + PROXY_USER_NAME, realUserUgi, groups.getGroups(PROXY_USER_NAME).toArray( + new String[groups.getGroups(PROXY_USER_NAME).size()])); + + assertAuthorized(proxyUserUgi, PROXY_IP); + } + @Test public void testProxyUsers() throws Exception { Configuration conf = new Configuration(); conf.set( - ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME), StringUtils.join(",", Arrays.asList(GROUP_NAMES))); conf.set( - 
ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME), PROXY_IP); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); @@ -75,11 +167,11 @@ public class TestProxyUsers { public void testProxyUsersWithUserConf() throws Exception { Configuration conf = new Configuration(); conf.set( - ProxyUsers.getProxySuperuserUserConfKey(REAL_USER_NAME), - StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME))); + DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME), + StringUtils.join(",", Arrays.asList(AUTHORIZED_PROXY_USER_NAME))); conf.set( - ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME), - PROXY_IP); + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME), + PROXY_IP); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); @@ -109,10 +201,10 @@ public class TestProxyUsers { public void testWildcardGroup() { Configuration conf = new Configuration(); conf.set( - ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME), "*"); conf.set( - ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME), PROXY_IP); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); @@ -143,10 +235,10 @@ public class TestProxyUsers { public void testWildcardUser() { Configuration conf = new Configuration(); conf.set( - ProxyUsers.getProxySuperuserUserConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserUserConfKey(REAL_USER_NAME), "*"); conf.set( - ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME), PROXY_IP); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); @@ -177,10 +269,10 @@ public class TestProxyUsers { public void testWildcardIP() { Configuration conf = new Configuration(); conf.set( - ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME), StringUtils.join(",", Arrays.asList(GROUP_NAMES))); conf.set( - ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME), "*"); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); @@ -208,15 +300,16 @@ public class TestProxyUsers { public void testWithDuplicateProxyGroups() throws Exception { Configuration conf = new Configuration(); conf.set( - ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME), StringUtils.join(",", Arrays.asList(GROUP_NAMES,GROUP_NAMES))); conf.set( - ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME), PROXY_IP); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); - Collection groupsToBeProxied = ProxyUsers.getProxyGroups().get( - ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME)); + Collection groupsToBeProxied = + ProxyUsers.getDefaultImpersonationProvider().getProxyGroups().get( + DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME)); assertEquals (1,groupsToBeProxied.size()); } @@ -225,18 +318,51 @@ public class TestProxyUsers { public void testWithDuplicateProxyHosts() throws Exception { Configuration conf = new Configuration(); conf.set( - ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER_NAME), + 
DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER_NAME), StringUtils.join(",", Arrays.asList(GROUP_NAMES))); conf.set( - ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME), + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME), StringUtils.join(",", Arrays.asList(PROXY_IP,PROXY_IP))); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); - Collection hosts = ProxyUsers.getProxyHosts().get( - ProxyUsers.getProxySuperuserIpConfKey(REAL_USER_NAME)); + Collection hosts = + ProxyUsers.getDefaultImpersonationProvider().getProxyHosts().get( + DefaultImpersonationProvider.getProxySuperuserIpConfKey(REAL_USER_NAME)); assertEquals (1,hosts.size()); } + + @Test + public void testProxyUsersWithProviderOverride() throws Exception { + Configuration conf = new Configuration(); + conf.set( + CommonConfigurationKeysPublic.HADOOP_SECURITY_IMPERSONATION_PROVIDER_CLASS, + "org.apache.hadoop.security.authorize.TestProxyUsers$TestDummyImpersonationProvider"); + ProxyUsers.refreshSuperUserGroupsConfiguration(conf); + + // First try proxying a group that's allowed + UserGroupInformation realUserUgi = UserGroupInformation + .createUserForTesting(REAL_USER_NAME, SUDO_GROUP_NAMES); + UserGroupInformation proxyUserUgi = UserGroupInformation.createProxyUserForTesting( + PROXY_USER_NAME, realUserUgi, GROUP_NAMES); + + // From good IP + assertAuthorized(proxyUserUgi, "1.2.3.4"); + // From bad IP + assertAuthorized(proxyUserUgi, "1.2.3.5"); + + // Now try proxying a group that's not allowed + realUserUgi = UserGroupInformation + .createUserForTesting(REAL_USER_NAME, GROUP_NAMES); + proxyUserUgi = UserGroupInformation.createProxyUserForTesting( + PROXY_USER_NAME, realUserUgi, GROUP_NAMES); + + // From good IP + assertNotAuthorized(proxyUserUgi, "1.2.3.4"); + // From bad IP + assertNotAuthorized(proxyUserUgi, "1.2.3.5"); + } + private void assertNotAuthorized(UserGroupInformation proxyUgi, String host) { try { @@ -254,4 +380,32 @@ public class TestProxyUsers { fail("Did not allow authorization of " + proxyUgi + " from " + host); } } + + static class TestDummyImpersonationProvider implements ImpersonationProvider { + /** + * Authorize a user (superuser) to impersonate another user (user1) if the + * superuser belongs to the group "sudo_user1" . 
+ */ + + public void authorize(UserGroupInformation user, + String remoteAddress) throws AuthorizationException{ + UserGroupInformation superUser = user.getRealUser(); + + String sudoGroupName = "sudo_" + user.getShortUserName(); + if (!Arrays.asList(superUser.getGroupNames()).contains(sudoGroupName)){ + throw new AuthorizationException("User: " + superUser.getUserName() + + " is not allowed to impersonate " + user.getUserName()); + } + } + + @Override + public void setConf(Configuration conf) { + + } + + @Override + public Configuration getConf() { + return null; + } + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestReaddir.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestReaddir.java index 1ca43bc8abb..4a19c6e82ce 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestReaddir.java +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestReaddir.java @@ -22,7 +22,6 @@ import static org.junit.Assert.assertTrue; import java.io.IOException; import java.net.InetAddress; -import java.util.Arrays; import java.util.List; import org.apache.hadoop.conf.Configuration; @@ -41,9 +40,8 @@ import org.apache.hadoop.nfs.nfs3.response.READDIRPLUS3Response; import org.apache.hadoop.nfs.nfs3.response.READDIRPLUS3Response.EntryPlus3; import org.apache.hadoop.oncrpc.XDR; import org.apache.hadoop.oncrpc.security.SecurityHandler; -import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.DefaultImpersonationProvider; import org.apache.hadoop.security.authorize.ProxyUsers; -import org.apache.hadoop.util.StringUtils; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -67,10 +65,10 @@ public class TestReaddir { public static void setup() throws Exception { String currentUser = System.getProperty("user.name"); config.set( - ProxyUsers.getProxySuperuserGroupConfKey(currentUser), + DefaultImpersonationProvider.getProxySuperuserGroupConfKey(currentUser), "*"); config.set( - ProxyUsers.getProxySuperuserIpConfKey(currentUser), + DefaultImpersonationProvider.getProxySuperuserIpConfKey(currentUser), "*"); ProxyUsers.refreshSuperUserGroupsConfiguration(config); cluster = new MiniDFSCluster.Builder(config).numDataNodes(1).build(); diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestWrites.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestWrites.java index 4771204a252..e1d7d4067ce 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestWrites.java +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestWrites.java @@ -50,6 +50,7 @@ import org.apache.hadoop.nfs.nfs3.response.CREATE3Response; import org.apache.hadoop.nfs.nfs3.response.READ3Response; import org.apache.hadoop.oncrpc.XDR; import org.apache.hadoop.oncrpc.security.SecurityHandler; +import org.apache.hadoop.security.authorize.DefaultImpersonationProvider; import org.apache.hadoop.security.authorize.ProxyUsers; import org.jboss.netty.channel.Channel; import org.junit.Assert; @@ -288,10 +289,10 @@ public class TestWrites { System.getProperty("user.name")); String currentUser = System.getProperty("user.name"); config.set( - ProxyUsers.getProxySuperuserGroupConfKey(currentUser), + DefaultImpersonationProvider.getProxySuperuserGroupConfKey(currentUser), "*"); config.set( - 
ProxyUsers.getProxySuperuserIpConfKey(currentUser), + DefaultImpersonationProvider.getProxySuperuserIpConfKey(currentUser), "*"); ProxyUsers.refreshSuperUserGroupsConfiguration(config); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java index 05a89042577..fa29b4b03f2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/TestDelegationTokenForProxyUser.java @@ -45,6 +45,7 @@ import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.WebHdfsTestUtil; import org.apache.hadoop.security.TestDoAsEffectiveUser; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.authorize.DefaultImpersonationProvider; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; import org.junit.AfterClass; @@ -88,7 +89,7 @@ public class TestDelegationTokenForProxyUser { builder.append("127.0.1.1,"); builder.append(InetAddress.getLocalHost().getCanonicalHostName()); LOG.info("Local Ip addresses: " + builder.toString()); - conf.setStrings(ProxyUsers.getProxySuperuserIpConfKey(superUserShortName), + conf.setStrings(DefaultImpersonationProvider.getProxySuperuserIpConfKey(superUserShortName), builder.toString()); } @@ -100,7 +101,7 @@ public class TestDelegationTokenForProxyUser { DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_MAX_LIFETIME_KEY, 10000); config.setLong( DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_RENEW_INTERVAL_KEY, 5000); - config.setStrings(ProxyUsers.getProxySuperuserGroupConfKey(REAL_USER), + config.setStrings(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(REAL_USER), "group1"); config.setBoolean( DFSConfigKeys.DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java index ee81432f29a..87a22390416 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/common/TestJspHelper.java @@ -58,6 +58,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.authorize.AuthorizationException; +import org.apache.hadoop.security.authorize.DefaultImpersonationProvider; import org.apache.hadoop.security.authorize.ProxyServers; import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.token.Token; @@ -327,8 +328,8 @@ public class TestJspHelper { String user = "TheNurse"; conf.set(DFSConfigKeys.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); - conf.set(ProxyUsers.getProxySuperuserGroupConfKey(realUser), "*"); - conf.set(ProxyUsers.getProxySuperuserIpConfKey(realUser), "*"); + conf.set(DefaultImpersonationProvider.getProxySuperuserGroupConfKey(realUser), "*"); + conf.set(DefaultImpersonationProvider.getProxySuperuserIpConfKey(realUser), "*"); ProxyUsers.refreshSuperUserGroupsConfiguration(conf); 
UserGroupInformation.setConfiguration(conf); UserGroupInformation ugi; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java index 8d40cd01c5e..29fee68a06e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestAuditLogger.java @@ -39,8 +39,8 @@ import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.web.resources.GetOpParam; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.net.NetUtils; -import org.apache.hadoop.security.authorize.ProxyUsers; import org.apache.hadoop.security.authorize.ProxyServers; +import org.apache.hadoop.security.authorize.ProxyUsers; import org.junit.Before; import org.junit.Test; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java index df9462828be..aadc6b0a417 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/security/TestRefreshUserMappings.java @@ -41,6 +41,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.tools.DFSAdmin; import org.apache.hadoop.security.authorize.AuthorizationException; +import org.apache.hadoop.security.authorize.DefaultImpersonationProvider; import org.apache.hadoop.security.authorize.ProxyUsers; import org.junit.After; import org.junit.Before; @@ -150,8 +151,8 @@ public class TestRefreshUserMappings { final String [] GROUP_NAMES2 = new String [] {"gr3" , "gr4"}; //keys in conf - String userKeyGroups = ProxyUsers.getProxySuperuserGroupConfKey(SUPER_USER); - String userKeyHosts = ProxyUsers.getProxySuperuserIpConfKey (SUPER_USER); + String userKeyGroups = DefaultImpersonationProvider.getProxySuperuserGroupConfKey(SUPER_USER); + String userKeyHosts = DefaultImpersonationProvider.getProxySuperuserIpConfKey (SUPER_USER); config.set(userKeyGroups, "gr3,gr4,gr5"); // superuser can proxy for this group config.set(userKeyHosts,"127.0.0.1"); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java index 32e78ebf182..554f57c8a0a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAdminService.java @@ -348,14 +348,14 @@ public class TestRMAdminService { rm.adminService.refreshSuperUserGroupsConfiguration( RefreshSuperUserGroupsConfigurationRequest.newInstance()); - Assert.assertTrue(ProxyUsers.getProxyGroups() + Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyGroups() .get("hadoop.proxyuser.test.groups").size() == 1); - 
Assert.assertTrue(ProxyUsers.getProxyGroups() + Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyGroups() .get("hadoop.proxyuser.test.groups").contains("test_groups")); - Assert.assertTrue(ProxyUsers.getProxyHosts() + Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyHosts() .get("hadoop.proxyuser.test.hosts").size() == 1); - Assert.assertTrue(ProxyUsers.getProxyHosts() + Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyHosts() .get("hadoop.proxyuser.test.hosts").contains("test_hosts")); } @@ -708,14 +708,14 @@ public class TestRMAdminService { aclsString); // verify ProxyUsers and ProxyHosts - Assert.assertTrue(ProxyUsers.getProxyGroups() + Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyGroups() .get("hadoop.proxyuser.test.groups").size() == 1); - Assert.assertTrue(ProxyUsers.getProxyGroups() + Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyGroups() .get("hadoop.proxyuser.test.groups").contains("test_groups")); - Assert.assertTrue(ProxyUsers.getProxyHosts() + Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyHosts() .get("hadoop.proxyuser.test.hosts").size() == 1); - Assert.assertTrue(ProxyUsers.getProxyHosts() + Assert.assertTrue(ProxyUsers.getDefaultImpersonationProvider().getProxyHosts() .get("hadoop.proxyuser.test.hosts").contains("test_hosts")); // verify UserToGroupsMappings
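
With HADOOP-10448, the impersonation check becomes pluggable: ProxyUsers.authorize() now simply delegates to whatever ImpersonationProvider is named by hadoop.security.impersonation.provider.class, falling back to DefaultImpersonationProvider when the key is unset. The following is only a rough sketch of what a site-specific provider could look like, loosely modeled on the TestDummyImpersonationProvider in this patch; the package, class name, and the "admins" group are illustrative assumptions, not part of the patch.

package org.example.security;

import java.util.Arrays;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AuthorizationException;
import org.apache.hadoop.security.authorize.ImpersonationProvider;

/**
 * Illustrative provider: a superuser may impersonate another user only if
 * the superuser belongs to a hypothetical "admins" group.
 */
public class GroupGatedImpersonationProvider implements ImpersonationProvider {
  private Configuration conf;

  @Override
  public void authorize(UserGroupInformation user, String remoteAddress)
      throws AuthorizationException {
    UserGroupInformation superUser = user.getRealUser();
    if (superUser == null) {
      // Not a proxy/doAs call, nothing to authorize.
      return;
    }
    if (!Arrays.asList(superUser.getGroupNames()).contains("admins")) {
      throw new AuthorizationException("User: " + superUser.getUserName()
          + " is not allowed to impersonate " + user.getUserName());
    }
  }

  @Override
  public void setConf(Configuration conf) {
    // Invoked by ReflectionUtils.newInstance() when ProxyUsers loads the provider.
    this.conf = conf;
  }

  @Override
  public Configuration getConf() {
    return conf;
  }
}

Such a class would be enabled by setting hadoop.security.impersonation.provider.class to its fully qualified name in core-site.xml; ProxyUsers.refreshSuperUserGroupsConfiguration(conf) then instantiates it through ReflectionUtils.newInstance(), exactly as the getInstance() method above does for the default provider.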