Reverting commit 1001067 while related HDFS and MR JIRAs are tested (HADOOP-6951).

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1001127 13f79535-47bb-0310-9956-ffa450edef68
Thomas White 2010-09-25 02:41:10 +00:00
parent 0fd49b3537
commit 47d4ebebac
4 changed files with 7 additions and 34 deletions
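
The change being reverted addressed HADOOP-6951: ServiceAuthorizationManager kept its protocol-to-ACL map in static, JVM-wide state, so distinct minicluster services (e.g. the NN and JT sharing one test JVM) overwrote each other's service policies on refresh. The reverted change had given each Server its own manager; this commit puts the static design back while the dependent HDFS and MapReduce JIRAs are retested. A minimal, self-contained sketch of the clobbering behaviour, using toy names rather than the real Hadoop classes:

import java.util.HashMap;
import java.util.Map;

// Toy model of the problem described in HADOOP-6951: a single static
// protocol->ACL map shared by every RPC server in the JVM. Class and
// method names here are illustrative, not the actual Hadoop types.
public class StaticAclClobberDemo {

  // JVM-wide state, as in the static ServiceAuthorizationManager
  // this commit restores.
  private static Map<String, String> protocolToAcl =
      new HashMap<String, String>();

  // Mirrors the restored static refresh(conf, provider): each call
  // replaces the whole map instead of merging per-service policies.
  static synchronized void refresh(Map<String, String> newAcls) {
    protocolToAcl = newAcls; // "Flip to the newly parsed permissions"
  }

  public static void main(String[] args) {
    // A minicluster starts a NameNode and a JobTracker in one JVM;
    // each service refreshes ACLs for its own protocols on startup.
    Map<String, String> nnAcls = new HashMap<String, String>();
    nnAcls.put("ClientProtocol", "hdfs-admins");
    refresh(nnAcls);

    Map<String, String> jtAcls = new HashMap<String, String>();
    jtAcls.put("JobSubmissionProtocol", "mr-admins");
    refresh(jtAcls); // wipes out the NameNode's policy

    // Prints false: the NameNode's protocol no longer has an ACL.
    System.out.println(protocolToAcl.containsKey("ClientProtocol"));
  }
}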

CHANGES.txt

@@ -247,9 +247,6 @@ Trunk (unreleased changes)
     HADOOP-6940. RawLocalFileSystem's markSupported method misnamed markSupport.
     (Tom White via eli).
 
-    HADOOP-6951. Distinct minicluster services (e.g. NN and JT) overwrite each
-    other's service policies. (Aaron T. Myers via tomwhite)
-
 Release 0.21.0 - Unreleased
 
   INCOMPATIBLE CHANGES

org/apache/hadoop/ipc/Server.java

@@ -60,7 +60,6 @@ import javax.security.sasl.SaslServer;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.BytesWritable;
@@ -79,7 +78,6 @@ import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.AuthorizationException;
-import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.token.SecretManager;
@@ -184,7 +182,6 @@ public abstract class Server {
   private Configuration conf;
   private SecretManager<TokenIdentifier> secretManager;
-  private ServiceAuthorizationManager serviceAuthorizationManager = new ServiceAuthorizationManager();
   private int maxQueueSize;
   private final int maxRespSize;
@@ -242,22 +239,6 @@ public abstract class Server {
     return rpcMetrics;
   }
 
-  /**
-   * Refresh the service authorization ACL for the service handled by this server.
-   */
-  public void refreshServiceAcl(Configuration conf, PolicyProvider provider) {
-    serviceAuthorizationManager.refresh(conf, provider);
-  }
-
-  /**
-   * Returns a handle to the serviceAuthorizationManager (required in tests)
-   * @return instance of ServiceAuthorizationManager for this server
-   */
-  @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
-  public ServiceAuthorizationManager getServiceAuthorizationManager() {
-    return serviceAuthorizationManager;
-  }
-
   /** A call queued for handling. */
   private static class Call {
     private int id;                               // the client's call id
@@ -1671,7 +1652,7 @@ public abstract class Server {
       throw new AuthorizationException("Unknown protocol: " +
                                        connection.getProtocol());
     }
-    serviceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
+    ServiceAuthorizationManager.authorize(user, protocol, getConf(), hostname);
   }
 }
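
Net effect of the Server.java hunks above: the per-server refreshServiceAcl and getServiceAuthorizationManager methods are gone, and authorization goes back through static calls on ServiceAuthorizationManager. A hedged sketch of the caller-visible difference (the helper class is illustrative, not part of the patch; the two signatures follow this diff):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;

// Illustrative helper showing the post-revert calling convention.
class AclRefreshSketch {
  static void refreshAcls(Configuration conf, PolicyProvider provider) {
    // Post-revert: one JVM-wide ACL map, refreshed statically.
    ServiceAuthorizationManager.refresh(conf, provider);
    // Pre-revert (removed by this commit), the call was per-server:
    //   server.refreshServiceAcl(conf, provider);
  }
}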

org/apache/hadoop/security/authorize/ServiceAuthorizationManager.java

@@ -20,7 +20,6 @@ package org.apache.hadoop.security.authorize;
 import java.io.IOException;
 import java.util.IdentityHashMap;
 import java.util.Map;
-import java.util.Set;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -44,7 +43,7 @@ public class ServiceAuthorizationManager {
   private static final Log LOG = LogFactory
     .getLog(ServiceAuthorizationManager.class);
 
-  private Map<Class<?>, AccessControlList> protocolToAcl =
+  private static Map<Class<?>, AccessControlList> protocolToAcl =
     new IdentityHashMap<Class<?>, AccessControlList>();
 
   /**
@@ -74,7 +73,7 @@ public class ServiceAuthorizationManager {
    * @param hostname fully qualified domain name of the client
    * @throws AuthorizationException on authorization failure
    */
-  public void authorize(UserGroupInformation user,
+  public static void authorize(UserGroupInformation user,
                         Class<?> protocol,
                         Configuration conf,
                         String hostname
@@ -130,7 +129,7 @@ public class ServiceAuthorizationManager {
     AUDITLOG.info(AUTHZ_SUCCESSFULL_FOR + user + " for protocol="+protocol);
   }
 
-  public synchronized void refresh(Configuration conf,
+  public static synchronized void refresh(Configuration conf,
                                    PolicyProvider provider) {
     // Get the system property 'hadoop.policy.file'
     String policyFile =
@@ -159,9 +158,4 @@ public class ServiceAuthorizationManager {
     // Flip to the newly parsed permissions
     protocolToAcl = newAcls;
   }
-
-  // Package-protected for use in tests.
-  Set<Class<?>> getProtocolsWithAcls() {
-    return protocolToAcl.keySet();
-  }
 }

org/apache/hadoop/ipc/TestRPC.java

@@ -41,6 +41,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.PolicyProvider;
 import org.apache.hadoop.security.authorize.Service;
+import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
 import org.apache.hadoop.security.AccessControlException;
 
 import static org.mockito.Mockito.*;
@@ -363,11 +364,11 @@ public class TestRPC extends TestCase {
   }
 
   private void doRPCs(Configuration conf, boolean expectFailure) throws Exception {
+    ServiceAuthorizationManager.refresh(conf, new TestPolicyProvider());
     Server server = RPC.getServer(TestProtocol.class,
                                   new TestImpl(), ADDRESS, 0, 5, true, conf, null);
-    server.refreshServiceAcl(conf, new TestPolicyProvider());
-
     TestProtocol proxy = null;
 
     server.start();
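
With refreshServiceAcl removed from Server, the test primes the static manager before the server is even created, so the ACLs are in place by the time it starts accepting connections. Condensed from the hunk above (TestPolicyProvider, TestProtocol, TestImpl and ADDRESS are defined elsewhere in TestRPC):

// Refresh the JVM-wide ACL map first, then bring the server up.
ServiceAuthorizationManager.refresh(conf, new TestPolicyProvider());
Server server = RPC.getServer(TestProtocol.class,
                              new TestImpl(), ADDRESS, 0, 5, true, conf, null);
server.start();

Because the map is static, this refresh also affects any other RPC server running in the same test JVM — exactly the behaviour HADOOP-6951 flagged.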