From 991faf598f7c7e42309348971a620f8a2e9666f0 Mon Sep 17 00:00:00 2001
From: Brandon Li
Date: Tue, 8 Oct 2013 04:42:06 +0000
Subject: [PATCH] HDFS-5316. Merging change 1530150 from trunk

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1530153 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt  |  3 +++
 .../server/namenode/NameNodeHttpServer.java  | 20 +++++++++++++-------
 .../hadoop/hdfs/TestNameNodeHttpServer.java  |  8 ++++----
 3 files changed, 20 insertions(+), 11 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 8100d48d107..dc5603f81d0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -107,6 +107,9 @@ Release 2.2.1 - UNRELEASED
     HDFS-5317. Go back to DFS Home link does not work on datanode webUI
     (Haohui Mai via brandonli)
 
+    HDFS-5316. Namenode ignores the default https port (Haohui Mai via
+    brandonli)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index 620e4ffb33e..b5afd5f3dce 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -52,6 +52,7 @@ public class NameNodeHttpServer {
   private final NameNode nn;
 
   private InetSocketAddress httpAddress;
+  private InetSocketAddress httpsAddress;
   private InetSocketAddress bindAddress;
 
   public static final String NAMENODE_ADDRESS_ATTRIBUTE_KEY = "name.node.address";
@@ -99,14 +100,15 @@ public class NameNodeHttpServer {
     boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
     if (certSSL) {
       boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
-      InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(infoHost + ":" + conf.get(
-          DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, "0"));
+      httpsAddress = NetUtils.createSocketAddr(conf.get(
+          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
+          DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
+
       Configuration sslConf = new Configuration(false);
-      if (certSSL) {
-        sslConf.addResource(conf.get(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
-            "ssl-server.xml"));
-      }
-      httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
+      sslConf.addResource(conf.get(
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
+          DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_DEFAULT));
+      httpServer.addSslListener(httpsAddress, sslConf, needClientAuth);
       // assume same ssl port for all datanodes
       InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
           DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 50475));
@@ -163,6 +165,10 @@
     return httpAddress;
   }
 
+  public InetSocketAddress getHttpsAddress() {
+    return httpsAddress;
+  }
+
   /**
    * Sets fsimage for use by servlets.
    *
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestNameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestNameNodeHttpServer.java
index 72cd92b3c1d..572f1d4ab5e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestNameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestNameNodeHttpServer.java
@@ -31,9 +31,9 @@ public class TestNameNodeHttpServer {
     System.setProperty("jetty.ssl.password", "foo");
     System.setProperty("jetty.ssl.keypassword", "bar");
 
-    MiniDFSCluster dfsCluster = new MiniDFSCluster.Builder(conf)
-        .numDataNodes(0).build();
-    dfsCluster.waitActive();
-    dfsCluster.shutdown();
+    MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf).numDataNodes(0)
+        .build();
+
+    cluster.shutdown();
   }
 }
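Note (not part of the committed patch): a minimal, hypothetical sketch of how the configuration keys touched by this change resolve the NameNode HTTPS bind address. The class name HttpsAddressExample is invented for illustration; Configuration, DFSConfigKeys, and NetUtils are the Hadoop classes already referenced in the diff above.

// Hypothetical illustration only; not part of this patch.
import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.net.NetUtils;

public class HttpsAddressExample {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // With the patch, the HTTPS listener address is taken from
    // dfs.namenode.https-address; when the key is unset, the declared
    // default (DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT) supplies both the host
    // and the default https port instead of falling back to port 0.
    InetSocketAddress httpsAddr = NetUtils.createSocketAddr(conf.get(
        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
        DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
    System.out.println("Resolved NameNode HTTPS address: " + httpsAddr);
  }
}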