diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 36949a2238b..4286f8a8f8e 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -70,6 +70,10 @@ Release 2.4.0 - UNRELEASED
     HADOOP-3679. Fixup assert ordering in unit tests to yield meaningful error
     messages. (Jay Vyas via cdouglas)
 
+    HADOOP-10221. Add a plugin to specify SaslProperties for RPC protocol
+    based on connection properties. (Benoy Antony and Daryn Sharp via
+    Arpit Agarwal)
+
   OPTIMIZATIONS
 
   BUG FIXES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 4c62efa9fb5..53e06b7b0f2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -282,5 +282,11 @@ public class CommonConfigurationKeysPublic {
   public static final String HTTP_POLICY_HTTP_ONLY = "HTTP_ONLY";
   @Deprecated
   public static final String HTTP_POLICY_HTTPS_ONLY = "HTTPS_ONLY";
+  /** See core-default.xml */
+  public static final String HADOOP_RPC_PROTECTION =
+    "hadoop.rpc.protection";
+  /** Class to override Sasl Properties for a connection */
+  public static final String HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS =
+    "hadoop.security.saslproperties.resolver.class";
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
index c36c0e4e112..88cdb2380cb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
@@ -732,6 +732,7 @@ public AuthMethod run()
           // for testing
           remoteId.saslQop =
               (String)saslRpcClient.getNegotiatedProperty(Sasl.QOP);
+          LOG.debug("Negotiated QOP is :" + remoteId.saslQop);
         } else if (UserGroupInformation.isSecurityEnabled() &&
             !fallbackAllowed) {
           throw new IOException("Server asks us to fall back to SIMPLE " +
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index 95c8c975740..39996a84a21 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -97,6 +97,7 @@
 import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcSaslProto.SaslState;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.AccessControlException;
+import org.apache.hadoop.security.SaslPropertiesResolver;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
 import org.apache.hadoop.security.SecurityUtil;
@@ -357,6 +358,7 @@ public static boolean isRpcInvocation() {
   private Configuration conf;
   private String portRangeConfig = null;
   private SecretManager secretManager;
+  private SaslPropertiesResolver saslPropsResolver;
   private ServiceAuthorizationManager serviceAuthorizationManager = new ServiceAuthorizationManager();
 
   private int maxQueueSize;
@@ -1566,7 +1568,9 @@ private RpcSaslProto buildSaslNegotiateResponse()
 
   private SaslServer createSaslServer(AuthMethod authMethod)
       throws IOException, InterruptedException {
-    return new SaslRpcServer(authMethod).create(this, secretManager);
+    final Map saslProps =
+        saslPropsResolver.getServerProperties(addr);
+    return new SaslRpcServer(authMethod).create(this, saslProps, secretManager);
   }
 
   /**
@@ -2168,6 +2172,7 @@ protected Server(String bindAddress, int port,
     if (secretManager != null || UserGroupInformation.isSecurityEnabled()) {
       SaslRpcServer.init(conf);
+      saslPropsResolver = SaslPropertiesResolver.getInstance(conf);
     }
 
     this.exceptionsHandler.addTerseExceptions(StandbyException.class);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
index 5343737ec34..92a62203f0d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcClient.java
@@ -89,6 +89,7 @@ public class SaslRpcClient {
   private final Configuration conf;
 
   private SaslClient saslClient;
+  private SaslPropertiesResolver saslPropsResolver;
   private AuthMethod authMethod;
 
   private static final RpcRequestHeaderProto saslHeader = ProtoUtil
@@ -112,6 +113,7 @@ public SaslRpcClient(UserGroupInformation ugi, Class protocol,
     this.protocol = protocol;
     this.serverAddr = serverAddr;
     this.conf = conf;
+    this.saslPropsResolver = SaslPropertiesResolver.getInstance(conf);
   }
 
   @VisibleForTesting
@@ -207,7 +209,8 @@ private SaslClient createSaslClient(SaslAuth authType)
     // if necessary, auth types below will verify they are valid
     final String saslProtocol = authType.getProtocol();
     final String saslServerName = authType.getServerId();
-    Map saslProperties = SaslRpcServer.SASL_PROPS;
+    Map saslProperties =
+        saslPropsResolver.getClientProperties(serverAddr.getAddress());
     CallbackHandler saslCallback = null;
 
     final AuthMethod method = AuthMethod.valueOf(authType.getMethod());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
index dbce13e974d..6efb9766084 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SaslRpcServer.java
@@ -70,6 +70,7 @@ public class SaslRpcServer {
   public static final Map SASL_PROPS = new TreeMap();
   private static SaslServerFactory saslFactory;
+  private static SaslPropertiesResolver resolver;
 
   public static enum QualityOfProtection {
     AUTHENTICATION("auth"),
@@ -129,7 +130,8 @@ public SaslRpcServer(AuthMethod authMethod) throws IOException {
 
   @InterfaceAudience.Private
   @InterfaceStability.Unstable
-  public SaslServer create(Connection connection,
+  public SaslServer create(final Connection connection,
+      final Map saslProperties,
       SecretManager secretManager
       ) throws IOException, InterruptedException {
     UserGroupInformation ugi = null;
@@ -162,12 +164,12 @@ public SaslServer create(Connection connection,
         @Override
         public SaslServer run() throws SaslException {
           return saslFactory.createSaslServer(mechanism, protocol, serverId,
-              SaslRpcServer.SASL_PROPS, callback);
+              saslProperties, callback);
         }
       });
     } else {
       saslServer = saslFactory.createSaslServer(mechanism, protocol, serverId,
-          SaslRpcServer.SASL_PROPS, callback);
+          saslProperties, callback);
     }
     if (saslServer == null) {
       throw new AccessControlException(
@@ -180,17 +182,10 @@ public SaslServer run() throws SaslException {
   }
 
   public static void init(Configuration conf) {
-    String[] qop = conf.getStrings("hadoop.rpc.protection",
-        QualityOfProtection.AUTHENTICATION.toString());
-
-    for (int i=0; i < qop.length; i++) {
-      qop[i] = QualityOfProtection.valueOf(qop[i].toUpperCase()).getSaslQop();
-    }
-
-    SASL_PROPS.put(Sasl.QOP, StringUtils.join(",", qop));
-    SASL_PROPS.put(Sasl.SERVER_AUTH, "true");
     Security.addProvider(new SaslPlainServer.SecurityProvider());
-    saslFactory = new FastSaslServerFactory(SASL_PROPS);
+    // passing null so factory is populated with all possibilities.  the
+    // properties passed when instantiating a server are what really matter
+    saslFactory = new FastSaslServerFactory(null);
   }
 
   static String encodeIdentifier(byte[] identifier) {
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 8f3263539ec..2368243781c 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -261,6 +261,19 @@
     authentication means authentication only and no integrity or privacy;
     integrity implies authentication and integrity are enabled; and privacy
     implies all of authentication, integrity and privacy are enabled.
+    hadoop.security.saslproperties.resolver.class can be used to override
+    the hadoop.rpc.protection for a connection at the server side.
+  </description>
+</property>
+
+<property>
+  <name>hadoop.security.saslproperties.resolver.class</name>
+  <value></value>
+  <description>SaslPropertiesResolver used to resolve the QOP used for a
+    connection. If not specified, the full set of values specified in
+    hadoop.rpc.protection is used while determining the QOP used for the
+    connection. If a class is specified, then the QOP values returned by
+    the class will be used while determining the QOP used for the connection.
   </description>
 </property>
 
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index d1bc7e785b0..903990b78a2 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.ipc;
 
 import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION;
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.HADOOP_RPC_PROTECTION;
 import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.KERBEROS;
 import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.SIMPLE;
 import static org.apache.hadoop.security.SaslRpcServer.AuthMethod.TOKEN;
@@ -33,11 +34,14 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.lang.annotation.Annotation;
+import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.security.PrivilegedExceptionAction;
 import java.security.Security;
 import java.util.ArrayList;
 import java.util.Collection;
+import java.util.HashMap;
+import java.util.Map;
 import java.util.Set;
 import java.util.regex.Pattern;
 
@@ -65,6 +69,7 @@
 import org.apache.hadoop.security.KerberosInfo;
 import org.apache.hadoop.security.SaslInputStream;
 import org.apache.hadoop.security.SaslPlainServer;
+import org.apache.hadoop.security.SaslPropertiesResolver;
 import org.apache.hadoop.security.SaslRpcClient;
 import org.apache.hadoop.security.SaslRpcServer;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -94,21 +99,29 @@ public class TestSaslRPC {
   public static Collection data() {
     Collection params = new ArrayList();
     for (QualityOfProtection qop : QualityOfProtection.values()) {
-      params.add(new Object[]{ new QualityOfProtection[]{qop},qop });
+      params.add(new Object[]{ new QualityOfProtection[]{qop},qop, null });
     }
     params.add(new Object[]{ new QualityOfProtection[]{
         QualityOfProtection.PRIVACY,QualityOfProtection.AUTHENTICATION },
-        QualityOfProtection.PRIVACY });
+        QualityOfProtection.PRIVACY, null});
+    params.add(new Object[]{ new QualityOfProtection[]{
+        QualityOfProtection.PRIVACY,QualityOfProtection.AUTHENTICATION },
+        QualityOfProtection.AUTHENTICATION,
+        "org.apache.hadoop.ipc.TestSaslRPC$AuthSaslPropertiesResolver" });
+
     return params;
   }
 
   QualityOfProtection[] qop;
   QualityOfProtection expectedQop;
+  String saslPropertiesResolver;
 
   public TestSaslRPC(QualityOfProtection[] qop,
-      QualityOfProtection expectedQop) {
+      QualityOfProtection expectedQop,
+      String saslPropertiesResolver) {
     this.qop=qop;
     this.expectedQop = expectedQop;
+    this.saslPropertiesResolver = saslPropertiesResolver;
   }
 
   private static final String ADDRESS = "0.0.0.0";
@@ -153,7 +166,11 @@ public void setup() {
     // the specific tests for kerberos will enable kerberos.  forcing it
     // for all tests will cause tests to fail if the user has a TGT
     conf.set(HADOOP_SECURITY_AUTHENTICATION, SIMPLE.toString());
-    conf.set("hadoop.rpc.protection", getQOPNames(qop));
+    conf.set(HADOOP_RPC_PROTECTION, getQOPNames(qop));
+    if (saslPropertiesResolver != null) {
+      conf.set(CommonConfigurationKeys.HADOOP_SECURITY_SASL_PROPS_RESOLVER_CLASS,
+          saslPropertiesResolver);
+    }
     UserGroupInformation.setConfiguration(conf);
     enableSecretManager = null;
     forceSecretManager = null;
@@ -964,6 +981,19 @@ private static void assertAuthEquals(Pattern expect,
     }
   }
 
+  /*
+   * Class used to test overriding QOP values using SaslPropertiesResolver
+   */
+  static class AuthSaslPropertiesResolver extends SaslPropertiesResolver {
+
+    @Override
+    public Map getServerProperties(InetAddress address) {
+      Map newProperties = new HashMap(getDefaultProperties());
+      newProperties.put(Sasl.QOP, QualityOfProtection.AUTHENTICATION.getSaslQop());
+      return newProperties;
+    }
+  }
+
   public static void main(String[] args) throws Exception {
     System.out.println("Testing Kerberos authentication over RPC");
     if (args.length != 2) {
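
The resolver hook exercised above is wired in through hadoop.security.saslproperties.resolver.class and is consulted once per connection: Server calls getServerProperties(InetAddress) when it creates a SaslServer, and SaslRpcClient calls getClientProperties(InetAddress) when it creates a SaslClient. The following is a minimal sketch of a custom resolver, modeled on the AuthSaslPropertiesResolver test class in this patch; the package, class name, and loopback-based policy are illustrative assumptions, not part of this change.

// Illustrative sketch only. Assumes the SaslPropertiesResolver API shown in
// this patch: getDefaultProperties() returns the properties derived from
// hadoop.rpc.protection, and getServerProperties() is invoked per connection.
package org.example.security;   // hypothetical package

import java.net.InetAddress;
import java.util.HashMap;
import java.util.Map;

import javax.security.sasl.Sasl;

import org.apache.hadoop.security.SaslPropertiesResolver;
import org.apache.hadoop.security.SaslRpcServer.QualityOfProtection;

public class LoopbackAuthOnlyResolver extends SaslPropertiesResolver {
  @Override
  public Map<String, String> getServerProperties(InetAddress clientAddress) {
    // Start from the QOP list configured via hadoop.rpc.protection.
    Map<String, String> props =
        new HashMap<String, String>(getDefaultProperties());
    if (clientAddress != null && clientAddress.isLoopbackAddress()) {
      // Local clients skip integrity/privacy and negotiate auth only.
      props.put(Sasl.QOP, QualityOfProtection.AUTHENTICATION.getSaslQop());
    }
    return props;
  }
}

Such a resolver would be enabled by setting hadoop.security.saslproperties.resolver.class to org.example.security.LoopbackAuthOnlyResolver (a hypothetical name) in core-site.xml, or via conf.set(...) as the test's setup() does with AuthSaslPropertiesResolver.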