From b344870bdd0881be715e7164b5ebff712584eae9 Mon Sep 17 00:00:00 2001
From: Alejandro Abdelnur
Date: Mon, 13 Aug 2012 21:56:31 +0000
Subject: [PATCH] HADOOP-8681. add support for HTTPS to the web UIs. (tucu)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1372642 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                      |   2 +
 .../fs/CommonConfigurationKeysPublic.java          |   3 +
 .../org/apache/hadoop/http/HttpConfig.java         |  48 ++++++++
 .../org/apache/hadoop/http/HttpServer.java         |  32 ++++-
 .../apache/hadoop/security/SecurityUtil.java       |  19 ++-
 .../src/main/resources/core-default.xml            |  10 ++
 .../apache/hadoop/http/TestSSLHttpServer.java      | 114 ++++++++++++++++++
 .../server/datanode/DatanodeJspHelper.java         |  23 ++--
 .../server/namenode/ClusterJspHelper.java          |   5 +-
 .../server/namenode/NamenodeJspHelper.java         |   6 +-
 .../hdfs/server/namenode/TransferFsImage.java      |   5 +-
 .../org/apache/hadoop/hdfs/tools/DFSck.java        |   3 +-
 .../mapreduce/v2/app/webapp/JobBlock.java          |   7 +-
 .../mapreduce/v2/app/webapp/NavBlock.java          |   4 +-
 .../mapreduce/v2/app/webapp/TaskPage.java          |   7 +-
 .../v2/app/webapp/dao/AMAttemptInfo.java           |   3 +-
 .../hadoop/mapreduce/util/HostUtil.java            |   7 +-
 .../mapreduce/v2/hs/webapp/HsJobBlock.java         |  15 +--
 .../mapreduce/v2/hs/webapp/HsTaskPage.java         |   3 +-
 .../hadoop/mapred/ClientServiceDelegate.java       |   3 +-
 .../rmapp/attempt/RMAppAttemptImpl.java            |   7 +-
 .../resourcemanager/webapp/AppBlock.java           |   4 +-
 .../resourcemanager/webapp/NodesPage.java          |   4 +-
 .../webapp/dao/AppAttemptInfo.java                 |   4 +-
 .../resourcemanager/webapp/dao/AppInfo.java        |   9 +-
 .../yarn/server/webproxy/ProxyUriUtils.java        |   5 +-
 .../amfilter/AmFilterInitializer.java              |   4 +-
 27 files changed, 295 insertions(+), 61 deletions(-)
 create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
 create mode 100644 hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 723ebb27c0b..ea5f7489dcd 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -22,6 +22,8 @@ Release 2.0.1-alpha - UNRELEASED
 
     HADOOP-8644. AuthenticatedURL should be able to use SSLFactory. (tucu)
 
+    HADOOP-8681. add support for HTTPS to the web UIs. (tucu)
+
   IMPROVEMENTS
 
     HADOOP-8340. SNAPSHOT build versions should compare as less than their eventual
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index b431664086f..dade59001bd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -237,5 +237,8 @@ public class CommonConfigurationKeysPublic {
 
   public static final String HADOOP_SECURITY_AUTH_TO_LOCAL =
       "hadoop.security.auth_to_local";
 
+  public static final String HADOOP_SSL_ENABLED_KEY = "hadoop.ssl.enabled";
+  public static final boolean HADOOP_SSL_ENABLED_DEFAULT = false;
+
 }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
new file mode 100644
index 00000000000..4ee2f5582f8
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpConfig.java
@@ -0,0 +1,48 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+
+/**
+ * Singleton to get access to Http related configuration.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public class HttpConfig {
+  private static boolean sslEnabled;
+
+  static {
+    Configuration conf = new Configuration();
+    sslEnabled = conf.getBoolean(
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY,
+        CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_DEFAULT);
+  }
+
+  public static boolean isSecure() {
+    return sslEnabled;
+  }
+
+  public static String getSchemePrefix() {
+    return (isSecure()) ? "https://" : "http://";
+  }
+
+}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
index c75c4cbacc6..c83eef13790 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer.java
@@ -23,12 +23,14 @@ import java.io.PrintWriter;
 import java.net.BindException;
 import java.net.InetSocketAddress;
 import java.net.URL;
+import java.security.GeneralSecurityException;
 import java.util.ArrayList;
 import java.util.Enumeration;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
+import javax.net.ssl.SSLServerSocketFactory;
 import javax.servlet.Filter;
 import javax.servlet.FilterChain;
 import javax.servlet.FilterConfig;
@@ -55,6 +57,7 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.authentication.server.AuthenticationFilter;
 import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.util.ReflectionUtils;
 import org.mortbay.io.Buffer;
 import org.mortbay.jetty.Connector;
@@ -104,6 +107,7 @@ public class HttpServer implements FilterContainer {
 
   private AccessControlList adminsAcl;
 
+  private SSLFactory sslFactory;
   protected final Server webServer;
   protected final Connector listener;
   protected final WebAppContext webAppContext;
@@ -207,7 +211,23 @@ public class HttpServer implements FilterContainer {
 
     if(connector == null) {
       listenerStartedExternally = false;
-      listener = createBaseListener(conf);
+      if (HttpConfig.isSecure()) {
+        sslFactory = new SSLFactory(SSLFactory.Mode.SERVER, conf);
+        try {
+          sslFactory.init();
+        } catch (GeneralSecurityException ex) {
+          throw new IOException(ex);
+        }
+        SslSocketConnector sslListener = new SslSocketConnector() {
+          @Override
+          protected SSLServerSocketFactory createFactory() throws Exception {
+            return sslFactory.createSSLServerSocketFactory();
+          }
+        };
+        listener = sslListener;
+      } else {
+        listener = createBaseListener(conf);
+      }
       listener.setHost(bindAddress);
       listener.setPort(port);
     } else {
@@ -708,6 +728,16 @@ public class HttpServer implements FilterContainer {
       exception = addMultiException(exception, e);
     }
 
+    try {
+      if (sslFactory != null) {
+        sslFactory.destroy();
+      }
+    } catch (Exception e) {
+      LOG.error("Error while destroying the SSLFactory"
+          + webAppContext.getDisplayName(), e);
+      exception = addMultiException(exception, e);
+    }
+
     try {
       // clear & stop webAppContext attributes to avoid memory leaks.
       webAppContext.clearAttributes();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
index 8189cfdb279..2f65892db70 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/SecurityUtil.java
@@ -40,10 +40,12 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.authentication.client.AuthenticatedURL;
 import org.apache.hadoop.security.authentication.client.AuthenticationException;
+import org.apache.hadoop.security.ssl.SSLFactory;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenInfo;
 
@@ -65,12 +67,23 @@ public class SecurityUtil {
   static boolean useIpForTokenService;
   @VisibleForTesting
   static HostResolver hostResolver;
-  
+
+  private static SSLFactory sslFactory;
+
   static {
-    boolean useIp = new Configuration().getBoolean(
+    Configuration conf = new Configuration();
+    boolean useIp = conf.getBoolean(
       CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP,
       CommonConfigurationKeys.HADOOP_SECURITY_TOKEN_SERVICE_USE_IP_DEFAULT);
     setTokenServiceUseIp(useIp);
+    if (HttpConfig.isSecure()) {
+      sslFactory = new SSLFactory(SSLFactory.Mode.CLIENT, conf);
+      try {
+        sslFactory.init();
+      } catch (Exception ex) {
+        throw new RuntimeException(ex);
+      }
+    }
   }
 
   /**
@@ -456,7 +469,7 @@
 
     AuthenticatedURL.Token token = new AuthenticatedURL.Token();
     try {
-      return new AuthenticatedURL().openConnection(url, token);
+      return new AuthenticatedURL(null, sslFactory).openConnection(url, token);
     } catch (AuthenticationException e) {
       throw new IOException("Exception trying to open authenticated connection to "
           + url, e);
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 38d9bec0dfc..1a60e26a5e0 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -1068,4 +1068,14 @@
   </description>
 </property>
 
+<property>
+  <name>hadoop.ssl.enabled</name>
+  <value>false</value>
+  <description>
+    Whether to use SSL for the HTTP endpoints. If set to true, the
+    NameNode, DataNode, ResourceManager, NodeManager, HistoryServer and
+    MapReduceAppMaster web UIs will be served over HTTPS instead of HTTP.
+  </description>
+</property>
+
 </configuration>
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
new file mode 100644
index 00000000000..f5ab9572255
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/http/TestSSLHttpServer.java
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.http;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.security.ssl.SSLFactory;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import javax.net.ssl.HttpsURLConnection;
+import java.io.File;
+import java.io.FileWriter;
+import java.io.InputStream;
+import java.io.Writer;
+import java.net.URL;
+
+/**
+ * This testcase issues SSL certificates, configures the HttpServer to serve
+ * HTTPS using the created certificates, and calls an echo servlet using the
+ * corresponding HTTPS URL.
+ */
+public class TestSSLHttpServer extends HttpServerFunctionalTest {
+  private static final String BASEDIR = System.getProperty("test.build.dir",
+      "target/test-dir") + "/" + TestSSLHttpServer.class.getSimpleName();
+
+  static final Log LOG = LogFactory.getLog(TestSSLHttpServer.class);
+  private static HttpServer server;
+  private static URL baseUrl;
+
+  @Before
+  public void setup() throws Exception {
+    File base = new File(BASEDIR);
+    FileUtil.fullyDelete(base);
+    base.mkdirs();
+    String classpathDir =
+        KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
+    Configuration conf = new Configuration();
+    String keystoresDir = new File(BASEDIR).getAbsolutePath();
+    String sslConfsDir =
+        KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
+    KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfsDir, conf, false);
+    conf.setBoolean(CommonConfigurationKeysPublic.HADOOP_SSL_ENABLED_KEY, true);
+
+    //we do this trick because the MR AppMaster is started in another VM and
+    //the HttpServer configuration is not loaded from the job.xml but from the
+    //site.xml files in the classpath
+    Writer writer = new FileWriter(classpathDir + "/core-site.xml");
+    conf.writeXml(writer);
+    writer.close();
+
+    conf.setInt(HttpServer.HTTP_MAX_THREADS, 10);
+    server = createServer("test", conf);
+    server.addServlet("echo", "/echo", TestHttpServer.EchoServlet.class);
+    server.start();
+    baseUrl = new URL("https://localhost:" + server.getPort() + "/");
+    LOG.info("HTTP server started: "+ baseUrl);
+  }
+
+  @After
+  public void cleanup() throws Exception {
+    server.stop();
+    String classpathDir =
+        KeyStoreTestUtil.getClasspathDir(TestSSLHttpServer.class);
+    new File(classpathDir + "/core-site.xml").delete();
+  }
+
+
+  @Test
+  public void testEcho() throws Exception {
+    assertEquals("a:b\nc:d\n",
+        readOut(new URL(baseUrl, "/echo?a=b&c=d")));
+    assertEquals("a:b\nc<:d\ne:>\n",
+        readOut(new URL(baseUrl, "/echo?a=b&c<=d&e=>")));
+  }
+
+  private static String readOut(URL url) throws Exception {
+    StringBuilder out = new StringBuilder();
+    HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
+    SSLFactory sslf = new
+        SSLFactory(SSLFactory.Mode.CLIENT, new Configuration());
+    sslf.init();
+    conn.setSSLSocketFactory(sslf.createSSLSocketFactory());
+    InputStream in = conn.getInputStream();
+    byte[] buffer = new byte[64 * 1024];
+    int len = in.read(buffer);
+    while (len > 0) {
+      out.append(new String(buffer, 0, len));
+      len = in.read(buffer);
+    }
+    return out.toString();
+  }
+
+}
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
index e09440efe50..d19e54e1122 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DatanodeJspHelper.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hdfs.server.datanode;
 
 import java.io.File;
 import java.io.IOException;
-import java.net.InetAddress;
 import java.net.InetSocketAddress;
 import java.net.URL;
 import java.net.URLEncoder;
@@ -37,7 +36,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
-import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -45,6 +43,7 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenSecretManager;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -140,7 +139,7 @@ public class DatanodeJspHelper {
       DatanodeInfo chosenNode = JspHelper.bestNode(firstBlock, conf);
       String fqdn = canonicalize(chosenNode.getIpAddr());
       int datanodePort = chosenNode.getXferPort();
-      String redirectLocation = "http://" + fqdn + ":"
+      String redirectLocation = HttpConfig.getSchemePrefix() + fqdn + ":"
           + chosenNode.getInfoPort()
          + "/browseBlock.jsp?blockId=" + firstBlock.getBlock().getBlockId()
          + "&blockSize=" + firstBlock.getBlock().getNumBytes() + "&genstamp="
@@ -220,7 +219,7 @@
         JspHelper.addTableFooter(out);
       }
     }
-    out.print("<br><a href=\"http://"
+    out.print("<br><a href=\"" + HttpConfig.getSchemePrefix()
        + canonicalize(nnAddr) + ":"
        + namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
     dfs.close();
@@ -296,7 +295,7 @@
        Long.MAX_VALUE).getLocatedBlocks();
     // Add the various links for looking at the file contents
     // URL for downloading the full file
-    String downloadUrl = "http://" + req.getServerName() + ":"
+    String downloadUrl = HttpConfig.getSchemePrefix() + req.getServerName() + ":"
        + req.getServerPort() + "/streamFile" + ServletUtil.encodePath(filename)
        + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr, true)
        + JspHelper.getDelegationTokenUrlParam(tokenString);
@@ -314,7 +313,7 @@
       return;
     }
     String fqdn = canonicalize(chosenNode.getIpAddr());
-    String tailUrl = "http://" + fqdn + ":" + chosenNode.getInfoPort()
+    String tailUrl = HttpConfig.getSchemePrefix() + fqdn + ":" + chosenNode.getInfoPort()
        + "/tail.jsp?filename=" + URLEncoder.encode(filename, "UTF-8")
        + "&namenodeInfoPort=" + namenodeInfoPort
        + "&chunkSizeToView=" + chunkSizeToView
@@ -363,7 +362,7 @@
       String datanodeAddr = locs[j].getXferAddr();
       datanodePort = locs[j].getXferPort();
       fqdn = canonicalize(locs[j].getIpAddr());
-      String blockUrl = "http://" + fqdn + ":" + locs[j].getInfoPort()
+      String blockUrl = HttpConfig.getSchemePrefix() + fqdn + ":" + locs[j].getInfoPort()
          + "/browseBlock.jsp?blockId=" + blockidstring
          + "&blockSize=" + blockSize
          + "&filename=" + URLEncoder.encode(filename, "UTF-8")
@@ -374,7 +373,7 @@
          + JspHelper.getDelegationTokenUrlParam(tokenString)
          + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr);
-      String blockInfoUrl = "http://" + nnCanonicalName + ":"
+      String blockInfoUrl = HttpConfig.getSchemePrefix() + nnCanonicalName + ":"
          + namenodeInfoPort
          + "/block_info_xml.jsp?blockId=" + blockidstring;
       out.print(" "
@@ -385,7 +384,7 @@
     }
     out.println("");
     out.print("");
-    out.print("<br><a href=\"http://"
+    out.print("<br><a href=\"" + HttpConfig.getSchemePrefix()
        + canonicalize(nnAddr) + ":"
        + namenodeInfoPort + "/dfshealth.jsp\">Go back to DFS home</a>");
     dfs.close();
@@ -485,7 +484,7 @@
     String parent = new File(filename).getParent();
     JspHelper.printGotoForm(out, namenodeInfoPort, tokenString, parent, nnAddr);
     out.print("");
-    out.print("