From 0fd49b35370e3c0c72abaf2007d34d4207347693 Mon Sep 17 00:00:00 2001
From: Thomas White
Date: Fri, 24 Sep 2010 20:48:59 +0000
Subject: [PATCH] HADOOP-6951. Distinct minicluster services (e.g. NN and JT)
 overwrite each other's service policies. Contributed by Aaron T. Myers

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1001067 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                  |  3 +++
 src/java/org/apache/hadoop/ipc/Server.java   | 21 ++++++++++++++++++-
 .../ServiceAuthorizationManager.java         | 12 ++++++++---
 .../core/org/apache/hadoop/ipc/TestRPC.java  |  5 ++---
 4 files changed, 34 insertions(+), 7 deletions(-)

diff --git a/CHANGES.txt b/CHANGES.txt
index 75c87c81035..d01c5642732 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -247,6 +247,9 @@ Trunk (unreleased changes)
     HADOOP-6940. RawLocalFileSystem's markSupported method misnamed
     markSupport. (Tom White via eli).
 
+    HADOOP-6951. Distinct minicluster services (e.g. NN and JT) overwrite each
+    other's service policies. (Aaron T. Myers via tomwhite)
+
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES
diff --git a/src/java/org/apache/hadoop/ipc/Server.java b/src/java/org/apache/hadoop/ipc/Server.java
index e8ee049cb60..01d76d886ae 100644
--- a/src/java/org/apache/hadoop/ipc/Server.java
+++ b/src/java/org/apache/hadoop/ipc/Server.java
@@ -60,6 +60,7 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.BytesWritable;
@@ -78,6 +79,7 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.SecretManager;
@@ -182,6 +184,7 @@ public static String getRemoteAddress() {
 
   private Configuration conf;
   private SecretManager<TokenIdentifier> secretManager;
+  private ServiceAuthorizationManager serviceAuthorizationManager = new ServiceAuthorizationManager();
 
   private int maxQueueSize;
   private final int maxRespSize;
@@ -239,6 +242,22 @@ public RpcMetrics getRpcMetrics() {
     return rpcMetrics;
   }
 
+  /**
+   * Refresh the service authorization ACL for the service handled by this server.
+   */
+  public void refreshServiceAcl(Configuration conf, PolicyProvider provider) {
+    serviceAuthorizationManager.refresh(conf, provider);
+  }
+
+  /**
+   * Returns a handle to the serviceAuthorizationManager (required in tests)
+   * @return instance of ServiceAuthorizationManager for this server
+   */
+  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
+  public ServiceAuthorizationManager getServiceAuthorizationManager() {
+    return serviceAuthorizationManager;
+  }
+
   /** A call queued for handling. */
   private static class Call {
     private int id;                               // the client's call id
@@ -1652,7 +1671,7 @@ public void authorize(UserGroupInformation user,
         throw new AuthorizationException("Unknown protocol: " +
                                          connection.getProtocol());
       }
-      ServiceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
+      serviceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
     }
   }
 
diff --git a/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java b/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
index 3f78cf9ef2e..a73fa2cd9fe 100644
--- a/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
+++ b/src/java/org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java
@@ -20,6 +20,7 @@
 import java.io.IOException;
 import java.util.IdentityHashMap;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -43,7 +44,7 @@ public class ServiceAuthorizationManager {
   private static final Log LOG = LogFactory
       .getLog(ServiceAuthorizationManager.class);
 
-  private static Map<Class<?>, AccessControlList> protocolToAcl =
+  private Map<Class<?>, AccessControlList> protocolToAcl =
     new IdentityHashMap<Class<?>, AccessControlList>();
 
   /**
@@ -73,7 +74,7 @@ public class ServiceAuthorizationManager {
    * @param hostname fully qualified domain name of the client
    * @throws AuthorizationException on authorization failure
    */
-  public static void authorize(UserGroupInformation user,
+  public void authorize(UserGroupInformation user,
                                Class<?> protocol,
                                Configuration conf,
                                String hostname
@@ -129,7 +130,7 @@ public static void authorize(UserGroupInformation user,
     AUDITLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
   }
 
-  public static synchronized void refresh(Configuration conf,
+  public synchronized void refresh(Configuration conf,
                                           PolicyProvider provider) {
     // Get the system property 'hadoop.policy.file'
     String policyFile =
@@ -158,4 +159,9 @@ public static synchronized void refresh(Configuration conf,
     // Flip to the newly parsed permissions
     protocolToAcl = newAcls;
   }
+
+  // Package-protected for use in tests.
+  Set<Class<?>> getProtocolsWithAcls() {
+    return protocolToAcl.keySet();
+  }
 }
diff --git a/src/test/core/org/apache/hadoop/ipc/TestRPC.java b/src/test/core/org/apache/hadoop/ipc/TestRPC.java
index c87391e4d58..9ca6a6e9361 100644
--- a/src/test/core/org/apache/hadoop/ipc/TestRPC.java
+++ b/src/test/core/org/apache/hadoop/ipc/TestRPC.java
@@ -41,7 +41,6 @@
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.Service;
-import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.AccessControlException;
 
 import static org.mockito.Mockito.*;
@@ -364,11 +363,11 @@ public Service[] getServices() {
   }
 
   private void doRPCs(Configuration conf, boolean expectFailure) throws Exception {
-    ServiceAuthorizationManager.refresh(conf, new TestPolicyProvider());
-
     Server server = RPC.getServer(TestProtocol.class,
                                   new TestImpl(), ADDRESS, 0, 5, true, conf, null);
 
+    server.refreshServiceAcl(conf, new TestPolicyProvider());
+
     TestProtocol proxy = null;
 
     server.start();