From 983d47ecb300dac79db22aa54a6c681dc636819b Mon Sep 17 00:00:00 2001
From: Kihwal Lee
Date: Tue, 2 Aug 2016 10:52:10 -0500
Subject: [PATCH] HADOOP-13429. Dispose of unnecessary SASL servers. Contributed by Daryn Sharp.

(cherry picked from commit b3018e73ccae43484d9cb85eabae814eb7f050a6)
---
 .../src/main/java/org/apache/hadoop/ipc/Server.java   | 13 +++++++++----
 .../java/org/apache/hadoop/ipc/TestSaslRPC.java       | 12 +++++++++++-
 2 files changed, 20 insertions(+), 5 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index a6413e73e3d..a0402c231f6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -1569,7 +1569,10 @@ public abstract class Server {
         String qop = (String) saslServer.getNegotiatedProperty(Sasl.QOP);
         // SASL wrapping is only used if the connection has a QOP, and
         // the value is not auth.  ex. auth-int & auth-priv
-        useWrap = (qop != null && !"auth".equalsIgnoreCase(qop));
+        useWrap = (qop != null && !"auth".equalsIgnoreCase(qop));
+        if (!useWrap) {
+          disposeSasl();
+        }
       }
     }
 
@@ -1650,9 +1653,9 @@ public abstract class Server {
     private void switchToSimple() {
       // disable SASL and blank out any SASL server
       authProtocol = AuthProtocol.NONE;
-      saslServer = null;
+      disposeSasl();
     }
-    
+
     private RpcSaslProto buildSaslResponse(SaslState state, byte[] replyToken) {
       if (LOG.isDebugEnabled()) {
         LOG.debug("Will send " + state + " token of size "
@@ -1688,6 +1691,8 @@ public abstract class Server {
         try {
           saslServer.dispose();
         } catch (SaslException ignored) {
+        } finally {
+          saslServer = null;
         }
       }
     }
@@ -1906,7 +1911,7 @@ public abstract class Server {
           .getProtocol() : null;
 
       UserGroupInformation protocolUser = ProtoUtil.getUgi(connectionContext);
-      if (saslServer == null) {
+      if (authProtocol == AuthProtocol.NONE) {
         user = protocolUser;
       } else {
         // user is authenticated
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
index ec53e8c9762..72371a7ae91 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client.ConnectionId;
+import org.apache.hadoop.ipc.Server.Connection;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.*;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -270,7 +271,16 @@ public class TestSaslRPC extends TestRpcBase {
       assertEquals(TOKEN, authMethod);
       //QOP must be auth
       assertEquals(expectedQop.saslQop,
-                   RPC.getConnectionIdForProxy(proxy).getSaslQop());
+                   RPC.getConnectionIdForProxy(proxy).getSaslQop());
+      int n = 0;
+      for (Connection connection : server.getConnections()) {
+        // only qop auth should dispose of the sasl server
+        boolean hasServer = (connection.saslServer != null);
+        assertTrue("qop:" + expectedQop + " hasServer:" + hasServer,
+            (expectedQop == QualityOfProtection.AUTHENTICATION) ^ hasServer);
+        n++;
+      }
+      assertTrue(n > 0);
       proxy.ping(null, newEmptyRequest());
     } finally {
       stop(server, proxy);