From 162e9999c7703f690397ad7e17791ac5da0176f6 Mon Sep 17 00:00:00 2001
From: Takanobu Asanuma
Date: Wed, 27 Mar 2019 03:27:02 +0900
Subject: [PATCH] HDFS-14037. Fix SSLFactory truststore reloader thread leak in
 URLConnectionFactory.

(cherry picked from commit 55fb3c32fb48ca26a629d4d5f3f07e2858d09594)
---
 .../hdfs/web/SSLConnectionConfigurator.java   | 72 +++++++++++++++++++
 .../hadoop/hdfs/web/URLConnectionFactory.java | 43 +++--------
 .../hadoop/hdfs/web/WebHdfsFileSystem.java    |  3 +
 .../hdfs/web/TestURLConnectionFactory.java    | 53 ++++++++++++++
 .../router/RouterWebHdfsMethods.java          |  2 +
 .../qjournal/client/QuorumJournalManager.java |  1 +
 6 files changed, 139 insertions(+), 35 deletions(-)
 create mode 100644 hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/SSLConnectionConfigurator.java

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/SSLConnectionConfigurator.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/SSLConnectionConfigurator.java
new file mode 100644
index 00000000000..7bf7ae1eee1
--- /dev/null
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/SSLConnectionConfigurator.java
@@ -0,0 +1,72 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
+import org.apache.hadoop.security.ssl.SSLFactory;
+
+import javax.net.ssl.HostnameVerifier;
+import javax.net.ssl.HttpsURLConnection;
+import javax.net.ssl.SSLSocketFactory;
+import java.io.IOException;
+import java.net.HttpURLConnection;
+import java.security.GeneralSecurityException;
+
+/**
+ * Configure a connection to use SSL authentication.
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+
+public class SSLConnectionConfigurator implements ConnectionConfigurator {
+  private final SSLFactory factory;
+  private final SSLSocketFactory sf;
+  private final HostnameVerifier hv;
+  private final int connectTimeout;
+  private final int readTimeout;
+
+  SSLConnectionConfigurator(int connectTimeout, int readTimeout,
+      Configuration conf) throws IOException, GeneralSecurityException {
+    factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+    factory.init();
+    sf = factory.createSSLSocketFactory();
+    hv = factory.getHostnameVerifier();
+    this.connectTimeout = connectTimeout;
+    this.readTimeout = readTimeout;
+  }
+
+  @Override
+  public HttpURLConnection configure(HttpURLConnection conn) {
+    if (conn instanceof HttpsURLConnection) {
+      HttpsURLConnection c = (HttpsURLConnection) conn;
+      c.setSSLSocketFactory(sf);
+      c.setHostnameVerifier(hv);
+    }
+    conn.setConnectTimeout(connectTimeout);
+    conn.setReadTimeout(readTimeout);
+    return conn;
+  }
+
+  void destroy() {
+    factory.destroy();
+  }
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java
index 9713932a6ae..8b6c7f7cfd0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/URLConnectionFactory.java
@@ -22,11 +22,6 @@ import java.io.IOException;
 import java.net.HttpURLConnection;
 import java.net.URL;
 import java.net.URLConnection;
-import java.security.GeneralSecurityException;
-
-import javax.net.ssl.HostnameVerifier;
-import javax.net.ssl.HttpsURLConnection;
-import javax.net.ssl.SSLSocketFactory;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -36,7 +31,6 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
 import org.apache.hadoop.security.authentication.client.ConnectionConfigurator;
-import org.apache.hadoop.security.ssl.SSLFactory;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -103,7 +97,7 @@ public class URLConnectionFactory {
       final int connectTimeout, final int readTimeout, Configuration conf) {
     ConnectionConfigurator conn;
     try {
-      conn = newSslConnConfigurator(connectTimeout, readTimeout, conf);
+      conn = new SSLConnectionConfigurator(connectTimeout, readTimeout, conf);
     } catch (Exception e) {
       LOG.warn(
           "Cannot load customized ssl related configuration. Fallback to" +
Fallback to" + @@ -139,7 +133,7 @@ public class URLConnectionFactory { ConnectionConfigurator conn; try { ConnectionConfigurator sslConnConfigurator - = newSslConnConfigurator(connectTimeout, readTimeout, conf); + = new SSLConnectionConfigurator(connectTimeout, readTimeout, conf); conn = new OAuth2ConnectionConfigurator(conf, sslConnConfigurator); } catch (Exception e) { @@ -153,33 +147,6 @@ public class URLConnectionFactory { this.connConfigurator = connConfigurator; } - private static ConnectionConfigurator newSslConnConfigurator( - final int connectTimeout, final int readTimeout, Configuration conf) - throws IOException, GeneralSecurityException { - final SSLFactory factory; - final SSLSocketFactory sf; - final HostnameVerifier hv; - - factory = new SSLFactory(SSLFactory.Mode.CLIENT, conf); - factory.init(); - sf = factory.createSSLSocketFactory(); - hv = factory.getHostnameVerifier(); - - return new ConnectionConfigurator() { - @Override - public HttpURLConnection configure(HttpURLConnection conn) - throws IOException { - if (conn instanceof HttpsURLConnection) { - HttpsURLConnection c = (HttpsURLConnection) conn; - c.setSSLSocketFactory(sf); - c.setHostnameVerifier(hv); - } - URLConnectionFactory.setTimeouts(conn, connectTimeout, readTimeout); - return conn; - } - }; - } - /** * Opens a url with read and connect timeouts * @@ -242,4 +209,10 @@ public class URLConnectionFactory { connection.setConnectTimeout(connectTimeout); connection.setReadTimeout(readTimeout); } + + public void destroy() { + if (connConfigurator instanceof SSLConnectionConfigurator) { + ((SSLConnectionConfigurator) connConfigurator).destroy(); + } + } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java index 4f7261359a5..bf398f71c8b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java @@ -1560,6 +1560,9 @@ public class WebHdfsFileSystem extends FileSystem } catch (IOException ioe) { LOG.debug("Token cancel failed: ", ioe); } finally { + if (connectionFactory != null) { + connectionFactory.destroy(); + } super.close(); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java index e028def2595..2be8bf43362 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/web/TestURLConnectionFactory.java @@ -17,13 +17,17 @@ */ package org.apache.hadoop.hdfs.web; +import java.io.File; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; import java.util.List; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileSystem; +import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; +import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.security.ssl.SSLFactory; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Assert; @@ -64,4 +68,53 @@ public final class TestURLConnectionFactory { logs.getOutput().contains( "Cannot 
             "Cannot load customized ssl related configuration"));
   }
+
+  @Test
+  public void testSSLFactoryCleanup() throws Exception {
+    String baseDir = GenericTestUtils.getTempPath(
+        TestURLConnectionFactory.class.getSimpleName());
+    File base = new File(baseDir);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    String keystoreDir = new File(baseDir).getAbsolutePath();
+    String sslConfDir = KeyStoreTestUtil.getClasspathDir(
+        TestURLConnectionFactory.class);
+    Configuration conf = new Configuration();
+    KeyStoreTestUtil.setupSSLConfig(keystoreDir, sslConfDir, conf, false,
+        true);
+    Configuration sslConf = KeyStoreTestUtil.getSslConfig();
+
+    sslConf.set("fs.defaultFS", "swebhdfs://localhost");
+    FileSystem fs = FileSystem.get(sslConf);
+
+    ThreadGroup threadGroup = Thread.currentThread().getThreadGroup();
+
+    while (threadGroup.getParent() != null) {
+      threadGroup = threadGroup.getParent();
+    }
+
+    Thread[] threads = new Thread[threadGroup.activeCount()];
+
+    threadGroup.enumerate(threads);
+    Thread reloaderThread = null;
+    for (Thread thread : threads) {
+      if ((thread.getName() != null)
+          && (thread.getName().contains("Truststore reloader thread"))) {
+        reloaderThread = thread;
+      }
+    }
+    Assert.assertTrue("Reloader is not alive", reloaderThread.isAlive());
+
+    fs.close();
+
+    boolean reloaderStillAlive = true;
+    for (int i = 0; i < 10; i++) {
+      reloaderStillAlive = reloaderThread.isAlive();
+      if (!reloaderStillAlive) {
+        break;
+      }
+      Thread.sleep(1000);
+    }
+    Assert.assertFalse("Reloader is still alive", reloaderStillAlive);
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java
index 34c3788db86..977341e10da 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterWebHdfsMethods.java
@@ -476,11 +476,13 @@ public class RouterWebHdfsMethods extends NamenodeWebHdfsMethods {
             (HttpURLConnection)connectionFactory.openConnection(url);
         conn.setRequestMethod(method);
 
+        connectionFactory.destroy();
         return conn;
       } catch (Exception e) {
         LOG.error("Cannot redirect request to {}", namenode, e);
       }
     }
+    connectionFactory.destroy();
     return null;
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
index ba2b20a7bbc..ba4d52d8b50 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/QuorumJournalManager.java
@@ -467,6 +467,7 @@ public class QuorumJournalManager implements JournalManager {
   @Override
   public void close() throws IOException {
     loggers.close();
+    connectionFactory.destroy();
   }
 
   public void selectInputStreams(Collection<EditLogInputStream> streams,
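
Notes (editorial, not part of the patch):

SSLFactory.init() starts a "Truststore reloader thread" that only stops when
SSLFactory.destroy() is called. Before this change, URLConnectionFactory
wrapped the SSLFactory in an anonymous ConnectionConfigurator and kept no
reference to it, so the thread could never be stopped and each SSL-enabled
WebHDFS client leaked one thread. The patch names the configurator
(SSLConnectionConfigurator), gives it a destroy() hook, and calls it from the
owners' close paths, which the new testSSLFactoryCleanup verifies.

A minimal caller-side sketch of the new lifecycle. The class name
DestroyExample and the endpoint URL are illustrative assumptions;
newDefaultURLConnectionFactory, openConnection, and destroy() are the
URLConnectionFactory methods touched by this patch.

    import java.net.HttpURLConnection;
    import java.net.URL;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.web.URLConnectionFactory;

    public class DestroyExample {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // May create an SSLConnectionConfigurator internally if the SSL
        // client configuration loads successfully.
        URLConnectionFactory factory =
            URLConnectionFactory.newDefaultURLConnectionFactory(conf);
        try {
          // Illustrative WebHDFS endpoint; any URL goes through the same path.
          URL url = new URL("https://localhost:9871/webhdfs/v1/?op=LISTSTATUS");
          HttpURLConnection conn =
              (HttpURLConnection) factory.openConnection(url);
          conn.getResponseCode();
          conn.disconnect();
        } finally {
          // New in this patch: stops the SSLFactory's truststore reloader
          // thread. A no-op when the configurator is not the SSL one.
          factory.destroy();
        }
      }
    }

WebHdfsFileSystem.close() now performs this cleanup automatically, as the hunk
above shows; standalone users of URLConnectionFactory must call destroy()
themselves.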