HADOOP-13429. Dispose of unnecessary SASL servers. Contributed by Daryn Sharp.

(cherry picked from commit b3018e73cc)
Kihwal Lee 2016-08-02 10:52:10 -05:00
parent afc8da0d86
commit 983d47ecb3
2 changed files with 20 additions and 5 deletions
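In short: a per-connection SaslServer is now kept only when the negotiated QOP actually requires message wrapping (auth-int / auth-conf). For plain auth it is disposed as soon as negotiation completes, switchToSimple() reuses the same helper, and disposeSasl() nulls the field in a finally block so the reference cannot linger after a failed dispose(). A rough, self-contained sketch of that pattern against plain javax.security.sasl (the class and the onNegotiationComplete() hook are illustrative stand-ins, not Hadoop code; only disposeSasl() mirrors the helper changed below):

    // Minimal sketch of the dispose-after-negotiation pattern; field and
    // method names other than disposeSasl() are invented for illustration.
    import javax.security.sasl.Sasl;
    import javax.security.sasl.SaslException;
    import javax.security.sasl.SaslServer;

    class SaslConnectionSketch {
      private SaslServer saslServer; // hypothetical per-connection SASL state
      private boolean useWrap;       // true only for auth-int / auth-conf QOPs

      // Called once saslServer.isComplete() is true.
      void onNegotiationComplete() {
        String qop = (String) saslServer.getNegotiatedProperty(Sasl.QOP);
        // Wrapping is only needed when a QOP stronger than plain "auth" was
        // negotiated; otherwise the SaslServer has nothing left to do.
        useWrap = (qop != null && !"auth".equalsIgnoreCase(qop));
        if (!useWrap) {
          disposeSasl();
        }
      }

      // Release mechanism-level resources and drop the reference either way.
      void disposeSasl() {
        if (saslServer != null) {
          try {
            saslServer.dispose();
          } catch (SaslException ignored) {
          } finally {
            saslServer = null;
          }
        }
      }
    }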

org/apache/hadoop/ipc/Server.java

@@ -1570,6 +1570,9 @@ public abstract class Server {
         // SASL wrapping is only used if the connection has a QOP, and
         // the value is not auth. ex. auth-int & auth-priv
         useWrap = (qop != null && !"auth".equalsIgnoreCase(qop));
+        if (!useWrap) {
+          disposeSasl();
+        }
       }
     }
@@ -1650,7 +1653,7 @@ public abstract class Server {
     private void switchToSimple() {
       // disable SASL and blank out any SASL server
       authProtocol = AuthProtocol.NONE;
-      saslServer = null;
+      disposeSasl();
     }

     private RpcSaslProto buildSaslResponse(SaslState state, byte[] replyToken) {
@@ -1688,6 +1691,8 @@ public abstract class Server {
         try {
           saslServer.dispose();
         } catch (SaslException ignored) {
+        } finally {
+          saslServer = null;
         }
       }
     }
@@ -1906,7 +1911,7 @@ public abstract class Server {
           .getProtocol() : null;

       UserGroupInformation protocolUser = ProtoUtil.getUgi(connectionContext);
-      if (saslServer == null) {
+      if (authProtocol == AuthProtocol.NONE) {
         user = protocolUser;
       } else {
         // user is authenticated
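Note on the last hunk above: because an authenticated connection can now have a null saslServer (it is disposed once the QOP turns out to be plain auth), saslServer == null no longer implies an unauthenticated connection; testing authProtocol == AuthProtocol.NONE preserves the original meaning of the branch.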

org/apache/hadoop/ipc/TestSaslRPC.java

@@ -28,6 +28,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.Client.ConnectionId;
+import org.apache.hadoop.ipc.Server.Connection;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.*;
 import org.apache.hadoop.security.SaslRpcServer.AuthMethod;
@@ -271,6 +272,15 @@ public class TestSaslRPC extends TestRpcBase {
       //QOP must be auth
       assertEquals(expectedQop.saslQop,
           RPC.getConnectionIdForProxy(proxy).getSaslQop());
+      int n = 0;
+      for (Connection connection : server.getConnections()) {
+        // only qop auth should dispose of the sasl server
+        boolean hasServer = (connection.saslServer != null);
+        assertTrue("qop:" + expectedQop + " hasServer:" + hasServer,
+            (expectedQop == QualityOfProtection.AUTHENTICATION) ^ hasServer);
+        n++;
+      }
+      assertTrue(n > 0);
       proxy.ping(null, newEmptyRequest());
     } finally {
       stop(server, proxy);
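The XOR in the new assertion encodes the expected invariant: for QualityOfProtection.AUTHENTICATION the per-connection SaslServer should already be gone (hasServer false), while for integrity or privacy it must still exist to handle wrap/unwrap; assertTrue(n > 0) keeps the check from passing vacuously when the server reports no connections.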