From 7e2aba0ab594f6939b47704f1087405c3d1c089b Mon Sep 17 00:00:00 2001
From: Jing Zhao
Date: Fri, 19 Jul 2013 05:41:33 +0000
Subject: [PATCH] HDFS-5007. Replace hard-coded property keys with
 DFSConfigKeys fields. Contributed by Kousuke Saruta.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1504764 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt        |  3 +++
 .../org/apache/hadoop/hdfs/DFSConfigKeys.java      |  1 -
 .../hadoop/hdfs/server/datanode/DataNode.java      |  2 +-
 .../hdfs/server/namenode/NameNodeHttpServer.java   | 10 +++++-----
 .../apache/hadoop/hdfs/TestHftpFileSystem.java     | 16 ++++++++--------
 5 files changed, 17 insertions(+), 15 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c06eddcfdbc..061b67eecd9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -450,6 +450,9 @@ Release 2.1.0-beta - 2013-07-02
     HADOOP-9418. Add symlink support to DistributedFileSystem (Andrew Wang via
     Colin Patrick McCabe)
 
+    HDFS-5007. Replace hard-coded property keys with DFSConfigKeys fields.
+    (Kousuke Saruta via jing9)
+
   OPTIMIZATIONS
 
     HDFS-4465. Optimize datanode ReplicasMap and ReplicaInfo. (atm)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 2f6a16d92a5..5dba83d695a 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -322,7 +322,6 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final boolean DFS_SUPPORT_APPEND_DEFAULT = true;
   public static final String DFS_HTTPS_ENABLE_KEY = "dfs.https.enable";
   public static final boolean DFS_HTTPS_ENABLE_DEFAULT = false;
-  public static final String DFS_HTTPS_PORT_KEY = "dfs.https.port";
   public static final String DFS_DEFAULT_CHUNK_VIEW_SIZE_KEY = "dfs.default.chunk.view.size";
   public static final int DFS_DEFAULT_CHUNK_VIEW_SIZE_DEFAULT = 32*1024;
   public static final String DFS_DATANODE_HTTPS_ADDRESS_KEY = "dfs.datanode.https.address";
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 6c358f755b6..78a14a75cad 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -397,7 +397,7 @@ public class DataNode extends Configured
       InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
           DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 0));
       Configuration sslConf = new HdfsConfiguration(false);
-      sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
+      sslConf.addResource(conf.get(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
           "ssl-server.xml"));
       this.infoServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
       if(LOG.isDebugEnabled()) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index 053464730df..93726b2a0b0 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -130,21 +130,21 @@ public class NameNodeHttpServer {
         }
       };
 
-      boolean certSSL = conf.getBoolean("dfs.https.enable", false);
+      boolean certSSL = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, false);
       if (certSSL) {
         boolean needClientAuth = conf.getBoolean("dfs.https.need.client.auth", false);
         InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(infoHost + ":" + conf.get(
-            "dfs.https.port", infoHost + ":" + 0));
+            DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, infoHost + ":" + 0));
         Configuration sslConf = new Configuration(false);
         if (certSSL) {
-          sslConf.addResource(conf.get("dfs.https.server.keystore.resource",
+          sslConf.addResource(conf.get(DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_RESOURCE_KEY,
               "ssl-server.xml"));
         }
         httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth);
         // assume same ssl port for all datanodes
         InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf.get(
-            "dfs.datanode.https.address", infoHost + ":" + 50475));
-        httpServer.setAttribute("datanode.https.port", datanodeSslPort
+            DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 50475));
+        httpServer.setAttribute(DFSConfigKeys.DFS_DATANODE_HTTPS_PORT_KEY, datanodeSslPort
             .getPort());
       }
       httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java
index af62f3ca30e..5213db8431d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHftpFileSystem.java
@@ -306,8 +306,8 @@ public class TestHftpFileSystem {
   @Test
   public void testHftpCustomDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
-    conf.setInt("dfs.http.port", 123);
-    conf.setInt("dfs.https.port", 456);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
 
     URI uri = URI.create("hftp://localhost");
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
@@ -341,8 +341,8 @@
   @Test
   public void testHftpCustomUriPortWithCustomDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
-    conf.setInt("dfs.http.port", 123);
-    conf.setInt("dfs.https.port", 456);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
 
     URI uri = URI.create("hftp://localhost:789");
     HftpFileSystem fs = (HftpFileSystem) FileSystem.get(uri, conf);
@@ -378,8 +378,8 @@
   @Test
   public void testHsftpCustomDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
-    conf.setInt("dfs.http.port", 123);
-    conf.setInt("dfs.https.port", 456);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
 
     URI uri = URI.create("hsftp://localhost");
     HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf);
@@ -413,8 +413,8 @@
   @Test
   public void testHsftpCustomUriPortWithCustomDefaultPorts() throws IOException {
     Configuration conf = new Configuration();
-    conf.setInt("dfs.http.port", 123);
-    conf.setInt("dfs.https.port", 456);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTP_PORT_KEY, 123);
+    conf.setInt(DFSConfigKeys.DFS_NAMENODE_HTTPS_PORT_KEY, 456);
 
     URI uri = URI.create("hsftp://localhost:789");
     HsftpFileSystem fs = (HsftpFileSystem) FileSystem.get(uri, conf);