diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index c7c5d114283..3dd3cf8b326 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -313,6 +313,9 @@ Release 0.23.0 - Unreleased
 
     HDFS-2317. Support read access to HDFS in webhdfs.  (szetszwo)
 
+    HDFS-2338. Add configuration option to enable/disable webhdfs.
+    (jitendra via szetszwo)
+
   IMPROVEMENTS
 
     HDFS-1875. MiniDFSCluster hard-codes dfs.datanode.address to localhost
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
index 82fd9fcf5b7..c10d185acfc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSConfigKeys.java
@@ -101,6 +101,8 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
   public static final int DFS_NAMENODE_REPLICATION_PENDING_TIMEOUT_SEC_DEFAULT = -1;
   public static final String DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY = "dfs.namenode.replication.max-streams";
   public static final int DFS_NAMENODE_REPLICATION_MAX_STREAMS_DEFAULT = 2;
+  public static final String DFS_WEBHDFS_ENABLED_KEY = "dfs.webhdfs.enabled";
+  public static final boolean DFS_WEBHDFS_ENABLED_DEFAULT = false;
   public static final String DFS_PERMISSIONS_ENABLED_KEY = "dfs.permissions.enabled";
   public static final boolean DFS_PERMISSIONS_ENABLED_DEFAULT = true;
   public static final String DFS_PERMISSIONS_SUPERUSERGROUP_KEY = "dfs.permissions.superusergroup";
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index d7b38c4e30f..80d8d3d86d9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -62,6 +62,8 @@
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_FEDERATION_NAMESERVICES;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT;
 import static org.apache.hadoop.hdfs.server.common.Util.now;
 
 import java.io.BufferedOutputStream;
@@ -545,10 +547,11 @@ conf, new AccessControlList(conf.get(DFS_ADMIN, " ")),
     this.infoServer.addServlet(null, "/blockScannerReport",
                                DataBlockScanner.Servlet.class);
 
-    infoServer.addJerseyResourcePackage(
-        DatanodeWebHdfsMethods.class.getPackage().getName()
-        + ";" + Param.class.getPackage().getName(),
-        "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
+    if (conf.getBoolean(DFS_WEBHDFS_ENABLED_KEY, DFS_WEBHDFS_ENABLED_DEFAULT)) {
+      infoServer.addJerseyResourcePackage(DatanodeWebHdfsMethods.class
+          .getPackage().getName() + ";" + Param.class.getPackage().getName(),
+          "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
+    }
     this.infoServer.start();
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
index d8a0c500544..f9dce83fa2e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
@@ -128,7 +128,7 @@ public HttpServer run() throws IOException, InterruptedException {
             nn.getNameNodeAddress());
         httpServer.setAttribute(FSIMAGE_ATTRIBUTE_KEY, nn.getFSImage());
         httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-        setupServlets(httpServer);
+        setupServlets(httpServer, conf);
         httpServer.start();
 
         // The web-server port can be ephemeral... ensure we have the correct
@@ -159,7 +159,7 @@ public InetSocketAddress getHttpAddress() {
     return httpAddress;
   }
 
-  private static void setupServlets(HttpServer httpServer) {
+  private static void setupServlets(HttpServer httpServer, Configuration conf) {
     httpServer.addInternalServlet("getDelegationToken",
         GetDelegationTokenServlet.PATH_SPEC,
         GetDelegationTokenServlet.class, true);
@@ -182,10 +182,12 @@ private static void setupServlets(HttpServer httpServer) {
     httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
         ContentSummaryServlet.class, false);
 
-    httpServer.addJerseyResourcePackage(
-        NamenodeWebHdfsMethods.class.getPackage().getName()
-        + ";" + Param.class.getPackage().getName(),
-        "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
+    if (conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
+        DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT)) {
+      httpServer.addJerseyResourcePackage(NamenodeWebHdfsMethods.class
+          .getPackage().getName() + ";" + Param.class.getPackage().getName(),
+          "/" + WebHdfsFileSystem.PATH_PREFIX + "/*");
+    }
   }
 
   public static FSImage getFsImageFromContext(ServletContext context) {
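
The sketch below is not part of the patch; it is a minimal example of how a test or client-side setup might flip the new switch programmatically, assuming the stock org.apache.hadoop.conf.Configuration API and the HdfsConfiguration subclass. The example class name is hypothetical. With the default of false, the NameNode and DataNode HTTP servers skip registering the WebHDFS Jersey resource packages guarded by the checks above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

// Hypothetical helper class, for illustration only.
public class WebHdfsToggleExample {
  public static void main(String[] args) {
    // Loads hdfs-default.xml / hdfs-site.xml as default resources.
    Configuration conf = new HdfsConfiguration();

    // WebHDFS is disabled unless dfs.webhdfs.enabled is set to true
    // (DFS_WEBHDFS_ENABLED_DEFAULT is false in the patch above).
    conf.setBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY, true);

    // Same check the NameNode and DataNode perform before registering
    // the WebHDFS Jersey resources.
    boolean enabled = conf.getBoolean(DFSConfigKeys.DFS_WEBHDFS_ENABLED_KEY,
        DFSConfigKeys.DFS_WEBHDFS_ENABLED_DEFAULT);
    System.out.println("webhdfs enabled: " + enabled);
  }
}

In a real deployment the property would more commonly be set as dfs.webhdfs.enabled in hdfs-site.xml rather than in code.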