HADOOP-10221. Add a plugin to specify SaslProperties for RPC protocol based on connection properties. (Contributed by Benoy Antony and Daryn Sharp)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1579382 13f79535-47bb-0310-9956-ffa450edef68
parent 7817245d88
commit f799618828
@@ -373,6 +373,10 @@ Release 2.4.0 - UNRELEASED
     HADOOP-3679. Fixup assert ordering in unit tests to yield meaningful error
     messages. (Jay Vyas via cdouglas)
 
+    HADOOP-10221. Add a plugin to specify SaslProperties for RPC protocol
+    based on connection properties. (Benoy Antony and Daryn Sharp via
+    Arpit Agarwal)
+
   OPTIMIZATIONS
 
   BUG FIXES
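The rest of this diff wires a SaslPropertiesResolver plugin point into the RPC client and server. As a minimal sketch of what such a plugin can look like, the class below is modeled on the AuthSaslPropertiesResolver test class added near the bottom of this commit; the package name, class name, and the loopback-address check are hypothetical, and the base-class methods used here (getDefaultProperties, getServerProperties) should be checked against SaslPropertiesResolver as committed.

package com.example.security; // hypothetical package and class, for illustration only

import java.net.InetAddress;
import java.util.HashMap;
import java.util.Map;

import javax.security.sasl.Sasl;

import org.apache.hadoop.security.SaslPropertiesResolver;
import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection;

/**
 * Example resolver: connections from the loopback address are downgraded to
 * authentication-only QOP, everything else keeps the defaults derived from
 * hadoop.rpc.protection.
 */
public class ExampleSaslPropertiesResolver extends SaslPropertiesResolver {

  @Override
  public Map<String, String> getServerProperties(InetAddress clientAddress) {
    Map<String, String> props =
        new HashMap<String, String>(getDefaultProperties());
    if (clientAddress != null && clientAddress.isLoopbackAddress()) {
      // Local connections only need authentication, not integrity or privacy.
      props.put(Sasl.QOP, QualityOfProtection.AUTHENTICATION.getSaslQop());
    }
    return props;
  }
}

Activating a resolver like this is a matter of pointing hadoop.security.saslproperties.resolver.class at it, as the new core-default.xml entry later in this diff documents.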
@@ -274,5 +274,11 @@ public class CommonConfigurationKeysPublic {
   /** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */
   public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT =
       60;
+  /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
+  public static final String HADOOP_RPC_PROTECTION =
+    "hadoop.rpc.protection";
+  /** Class to override Sasl Properties for a connection */
+  public static final String HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS =
+    "hadoop.security.saslproperties.resolver.class";
 }
 
@@ -732,6 +732,7 @@ public class Client {
         // for testing
         remoteId.saslQop =
             (String)saslRpcClient.getNegotiatedProperty(Sasl.QOP);
+        LOG.debug("Negotiated QOP is :" + remoteId.saslQop);
       } else if (UserGroupInformation.isSecurityEnabled() &&
           !fallbackAllowed) {
         throw new IOException("Server asks us to fall back to SIMPLE " +
@@ -98,6 +98,7 @@ import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslAuth;
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.SaslPropertiesResolver;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SecurityUtil;
@@ -360,6 +361,7 @@ public abstract class Server {
   private Configuration conf;
   private String portRangeConfig = null;
   private SecretManager<TokenIdentifier> secretManager;
+  private SaslPropertiesResolver saslPropsResolver;
   private ServiceAuthorizationManager serviceAuthorizationManager = new ServiceAuthorizationManager();
 
   private int maxQueueSize;
@@ -1637,7 +1639,9 @@ public abstract class Server {
 
     private SaslServer createSaslServer(AuthMethod authMethod)
         throws IOException, InterruptedException {
-      return new SaslRpcServer(authMethod).create(this, secretManager);
+      final Map<String,?> saslProps =
+          saslPropsResolver.getServerProperties(addr);
+      return new SaslRpcServer(authMethod).create(this ,saslProps, secretManager);
     }
 
     /**
@@ -2254,6 +2258,7 @@ public abstract class Server {
 
     if (secretManager != null || UserGroupInformation.isSecurityEnabled()) {
       SaslRpcServer.init(conf);
+      saslPropsResolver = SaslPropertiesResolver.getInstance(conf);
     }
 
     this.exceptionsHandler.addTerseExceptions(StandbyException.class);
@@ -89,6 +89,7 @@ public class SaslRpcClient {
   private final Configuration conf;
 
   private SaslClient saslClient;
+  private SaslPropertiesResolver saslPropsResolver;
   private AuthMethod authMethod;
 
   private static final RpcRequestHeaderProto saslHeader = ProtoUtil
@@ -112,6 +113,7 @@ public class SaslRpcClient {
     this.protocol = protocol;
     this.serverAddr = serverAddr;
     this.conf = conf;
+    this.saslPropsResolver = SaslPropertiesResolver.getInstance(conf);
   }
 
   @VisibleForTesting
@@ -207,7 +209,8 @@ public class SaslRpcClient {
     // if necessary, auth types below will verify they are valid
     final String saslProtocol = authType.getProtocol();
     final String saslServerName = authType.getServerId();
-    Map<String, String> saslProperties = SaslRpcServer.SASL_PROPS;
+    Map<String, String> saslProperties =
+      saslPropsResolver.getClientProperties(serverAddr.getAddress());
     CallbackHandler saslCallback = null;
 
     final AuthMethod method = AuthMethod.valueOf(authType.getMethod());
@@ -70,6 +70,7 @@ public class SaslRpcServer {
   public static final Map<String, String> SASL_PROPS =
       new TreeMap<String, String>();
   private static SaslServerFactory saslFactory;
+  private static SaslPropertiesResolver resolver;
 
   public static enum QualityOfProtection {
     AUTHENTICATION("auth"),
@@ -129,7 +130,8 @@ public class SaslRpcServer {
 
   @InterfaceAudience.Private
   @InterfaceStability.Unstable
-  public SaslServer create(Connection connection,
+  public SaslServer create(final Connection connection,
+      final Map<String,?> saslProperties,
       SecretManager<TokenIdentifier> secretManager
       ) throws IOException, InterruptedException {
     UserGroupInformation ugi = null;
@@ -162,12 +164,12 @@ public class SaslRpcServer {
         @Override
         public SaslServer run() throws SaslException {
           return saslFactory.createSaslServer(mechanism, protocol, serverId,
-              SaslRpcServer.SASL_PROPS, callback);
+              saslProperties, callback);
         }
       });
     } else {
       saslServer = saslFactory.createSaslServer(mechanism, protocol, serverId,
-          SaslRpcServer.SASL_PROPS, callback);
+          saslProperties, callback);
     }
     if (saslServer == null) {
       throw new AccessControlException(
@@ -180,17 +182,10 @@ public class SaslRpcServer {
   }
 
   public static void init(Configuration conf) {
-    String[] qop = conf.getStrings("hadoop.rpc.protection",
-        QualityOfProtection.AUTHENTICATION.toString());
-
-    for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
-    }
-
-    SASL_PROPS.put(Sasl.QOP, StringUtils.join(",", qop));
-    SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
     Security.addProvider(new SaslPlainServer.SecurityProvider());
-    saslFactory = new FastSaslServerFactory(SASL_PROPS);
+    // passing null so factory is populated with all possibilities. the
+    // properties passed when instantiating a server are what really matter
+    saslFactory = new FastSaslServerFactory(null);
   }
 
   static String encodeIdentifier(byte[] identifier) {
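The init() hunk above drops the one-time computation of SASL_PROPS; after this change the mapping from hadoop.rpc.protection names to SASL properties is applied per connection through the resolver instead. The helper below is a rough reconstruction of that mapping assembled from the removed lines; the wrapper class and method name are made up for illustration, and it reuses the same StringUtils.join utility the removed code called.

import java.util.Map;
import java.util.TreeMap;

import javax.security.sasl.Sasl;

import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection;
import org.apache.hadoop.util.StringUtils;

// Hypothetical helper, not part of the patch: shows how hadoop.rpc.protection
// values translate into the SASL property map that is now built per connection.
class QopMappingSketch {
  static Map<String, String> toSaslProperties(String... qopNames) {
    String[] qop = new String[qopNames.length];
    for (int i = 0; i < qopNames.length; i++) {
      // e.g. "privacy" -> QualityOfProtection.PRIVACY -> "auth-conf"
      qop[i] = QualityOfProtection.valueOf(qopNames[i].toUpperCase()).getSaslQop();
    }
    Map<String, String> props = new TreeMap<String, String>();
    props.put(Sasl.QOP, StringUtils.join(",", qop)); // e.g. "auth-conf,auth"
    props.put(Sasl.SERVER_AUTH, "true");             // the server must authenticate itself
    return props;
  }
}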
@@ -261,6 +261,19 @@
     authentication means authentication only and no integrity or privacy;
     integrity implies authentication and integrity are enabled; and privacy
     implies all of authentication, integrity and privacy are enabled.
+    hadoop.security.saslproperties.resolver.class can be used to override
+    the hadoop.rpc.protection for a connection at the server side.
   </description>
 </property>
 
+<property>
+  <name>hadoop.security.saslproperties.resolver.class</name>
+  <value></value>
+  <description>SaslPropertiesResolver used to resolve the QOP used for a
+    connection. If not specified, the full set of values specified in
+    hadoop.rpc.protection is used while determining the QOP used for the
+    connection. If a class is specified, then the QOP values returned by
+    the class will be used while determining the QOP used for the connection.
+  </description>
+</property>
+
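The descriptions above are the operator-facing contract. In code, both ends of a connection obtain the resolver from the Configuration and ask it for per-connection SASL properties, as the Server and SaslRpcClient hunks earlier in this commit do. The sketch below condenses that flow; the class and method names are hypothetical and error handling is omitted.

import java.net.InetAddress;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.security.SaslPropertiesResolver;

// Condensed from the Server and SaslRpcClient changes in this commit, not a
// literal copy; "remoteAddr" stands for the peer address of the connection.
class ResolverUsageSketch {

  static Map<String, String> serverSideProps(Configuration conf, InetAddress remoteAddr) {
    // Instantiates the class named by hadoop.security.saslproperties.resolver.class,
    // or the default resolver that simply reflects hadoop.rpc.protection.
    SaslPropertiesResolver resolver = SaslPropertiesResolver.getInstance(conf);
    return resolver.getServerProperties(remoteAddr);
  }

  static Map<String, String> clientSideProps(Configuration conf, InetAddress serverAddr) {
    SaslPropertiesResolver resolver = SaslPropertiesResolver.getInstance(conf);
    return resolver.getClientProperties(serverAddr);
  }
}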
@@ -19,6 +19,7 @@
 package org.apache.hadoop.ipc;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION;
 import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.KERBEROS;
 import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.SIMPLE;
 import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.TOKEN;
@@ -33,11 +34,14 @@ import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.IOException;
 import java.lang.annotation.Annotation;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
 import java.security.Security;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -65,6 +69,7 @@ import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SaslInputStream;
 import org.apache.hadoop.security.SaslPlainServer;
+import org.apache.hadoop.security.SaslPropertiesResolver;
 import org.apache.hadoop.security.SaslRpcClient;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -94,21 +99,29 @@ public class TestSaslRPC {
   public static Collection<Object[]> data() {
     Collection<Object[]> params = new ArrayList<Object[]>();
     for (QualityOfProtection qop : QualityOfProtection.values()) {
-      params.add(new Object[]{ new QualityOfProtection[]{qop},qop });
+      params.add(new Object[]{ new QualityOfProtection[]{qop},qop, null });
     }
     params.add(new Object[]{ new QualityOfProtection[]{
         QualityOfProtection.PRIVACY,QualityOfProtection.AUTHENTICATION },
-        QualityOfProtection.PRIVACY });
+        QualityOfProtection.PRIVACY, null});
+    params.add(new Object[]{ new QualityOfProtection[]{
+        QualityOfProtection.PRIVACY,QualityOfProtection.AUTHENTICATION },
+        QualityOfProtection.AUTHENTICATION ,
+        "org.apache.hadoop.ipc.TestSaslRPC$AuthSaslPropertiesResolver" });
+
     return params;
   }
 
   QualityOfProtection[] qop;
   QualityOfProtection expectedQop;
+  String saslPropertiesResolver ;
 
   public TestSaslRPC(QualityOfProtection[] qop,
-      QualityOfProtection expectedQop) {
+      QualityOfProtection expectedQop,
+      String saslPropertiesResolver) {
     this.qop=qop;
     this.expectedQop = expectedQop;
+    this.saslPropertiesResolver = saslPropertiesResolver;
   }
 
   private static final String ADDRESS = "0.0.0.0";
@@ -153,7 +166,11 @@ public class TestSaslRPC {
     // the specific tests for kerberos will enable kerberos. forcing it
     // for all tests will cause tests to fail if the user has a TGT
     conf.set(HADOOP_SECURITY_AUTHENTICATION, SIMPLE.toString());
-    conf.set("hadoop.rpc.protection", getQOPNames(qop));
+    conf.set(HADOOP_RPC_PROTECTION, getQOPNames(qop));
+    if (saslPropertiesResolver != null){
+      conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
+          saslPropertiesResolver);
+    }
     UserGroupInformation.setConfiguration(conf);
     enableSecretManager = null;
     forceSecretManager = null;
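Outside of this parameterized test, enabling a custom resolver uses the same configuration key as the conf.set call added above. A brief example, reusing the hypothetical ExampleSaslPropertiesResolver sketched near the top of this page:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;

// Assumption: com.example.security.ExampleSaslPropertiesResolver is the
// hypothetical resolver from the earlier sketch and is on the classpath.
class EnableResolverSketch {
  static Configuration withCustomResolver() {
    Configuration conf = new Configuration();
    conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
        "com.example.security.ExampleSaslPropertiesResolver");
    return conf;
  }
}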
@@ -964,6 +981,19 @@ public class TestSaslRPC {
     }
   }
 
+  /*
+   * Class used to test overriding QOP values using SaslPropertiesResolver
+   */
+  static class AuthSaslPropertiesResolver extends SaslPropertiesResolver{
+
+    @Override
+    public Map<String, String> getServerProperties(InetAddress address) {
+      Map<String, String> newPropertes = new HashMap<String, String>(getDefaultProperties());
+      newPropertes.put(Sasl.QOP, QualityOfProtection.AUTHENTICATION.getSaslQop());
+      return newPropertes;
+    }
+  }
+
   public static void main(String[] args) throws Exception {
     System.out.println("Testing Kerberos authentication over RPC");
     if (args.length != 2) {