From ac0a04a6e165920a6d43c2aa3dab06ca38f3135b Mon Sep 17 00:00:00 2001
From: John Zhuge
Date: Mon, 10 Jul 2017 17:22:00 -0700
Subject: [PATCH] HDFS-12114. Consistent HttpFS property names. Contributed by
 John Zhuge.

---
 .../fs/http/server/HttpFSServerWebServer.java      | 13 +++++++------
 .../libexec/shellprofile.d/hadoop-httpfs.sh        | 17 -----------------
 .../src/main/resources/httpfs-default.xml          |  8 ++++----
 .../src/site/markdown/ServerSetup.md.vm            | 11 ++++++-----
 .../http/server/TestHttpFSServerWebServer.java     |  2 +-
 5 files changed, 18 insertions(+), 33 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java
index d8706c5f895..0949f868350 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/java/org/apache/hadoop/fs/http/server/HttpFSServerWebServer.java
@@ -46,17 +46,16 @@ public class HttpFSServerWebServer {
   private static final String HTTPFS_SITE_XML = "httpfs-site.xml";
 
   // HTTP properties
-  static final String HTTP_PORT_KEY = "hadoop.httpfs.http.port";
+  static final String HTTP_PORT_KEY = "httpfs.http.port";
   private static final int HTTP_PORT_DEFAULT = 14000;
-  static final String HTTP_HOST_KEY = "hadoop.httpfs.http.host";
-  private static final String HTTP_HOST_DEFAULT = "0.0.0.0";
+  static final String HTTP_HOSTNAME_KEY = "httpfs.http.hostname";
+  private static final String HTTP_HOSTNAME_DEFAULT = "0.0.0.0";
 
   // SSL properties
   static final String SSL_ENABLED_KEY = "httpfs.ssl.enabled";
   private static final boolean SSL_ENABLED_DEFAULT = false;
 
-  private static final String HTTP_ADMINS_KEY =
-      "hadoop.httpfs.http.administrators";
+  private static final String HTTP_ADMINS_KEY = "httpfs.http.administrators";
 
   private static final String NAME = "webhdfs";
   private static final String SERVLET_PATH = "/webhdfs";
@@ -74,6 +73,8 @@ public class HttpFSServerWebServer {
     // Override configuration with deprecated environment variables.
     deprecateEnv("HTTPFS_TEMP", conf, HttpServer2.HTTP_TEMP_DIR_KEY,
         HTTPFS_SITE_XML);
+    deprecateEnv("HTTPFS_HTTP_HOSTNAME", conf, HTTP_HOSTNAME_KEY,
+        HTTPFS_SITE_XML);
     deprecateEnv("HTTPFS_HTTP_PORT", conf, HTTP_PORT_KEY,
         HTTPFS_SITE_XML);
     deprecateEnv("HTTPFS_MAX_THREADS", conf,
@@ -95,7 +96,7 @@ public class HttpFSServerWebServer {
         SSL_ENABLED_DEFAULT);
     scheme = sslEnabled ? HttpServer2.HTTPS_SCHEME : HttpServer2.HTTP_SCHEME;
 
-    String host = conf.get(HTTP_HOST_KEY, HTTP_HOST_DEFAULT);
+    String host = conf.get(HTTP_HOSTNAME_KEY, HTTP_HOSTNAME_DEFAULT);
     int port = conf.getInt(HTTP_PORT_KEY, HTTP_PORT_DEFAULT);
     URI endpoint = new URI(scheme, null, host, port, null, null, null);
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh
index a3bbe566045..239409cbf7f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/libexec/shellprofile.d/hadoop-httpfs.sh
@@ -30,17 +30,6 @@ function hdfs_subcommand_httpfs
     . "${HADOOP_CONF_DIR}/httpfs-env.sh"
   fi
 
-  hadoop_deprecate_envvar HTTPFS_CONFIG HADOOP_CONF_DIR
-  hadoop_deprecate_envvar HTTPFS_LOG HADOOP_LOG_DIR
-
-  hadoop_using_envvar HTTPFS_HTTP_HOSTNAME
-  hadoop_using_envvar HTTPFS_HTTP_PORT
-  hadoop_using_envvar HTTPFS_MAX_HTTP_HEADER_SIZE
-  hadoop_using_envvar HTTPFS_MAX_THREADS
-  hadoop_using_envvar HTTPFS_SSL_ENABLED
-  hadoop_using_envvar HTTPFS_SSL_KEYSTORE_FILE
-  hadoop_using_envvar HTTPFS_TEMP
-
   # shellcheck disable=SC2034
   HADOOP_SUBCMD_SUPPORTDAEMONIZATION=true
   # shellcheck disable=SC2034
@@ -53,12 +42,6 @@ function hdfs_subcommand_httpfs
     "-Dhttpfs.config.dir=${HTTPFS_CONFIG:-${HADOOP_CONF_DIR}}"
   hadoop_add_param HADOOP_OPTS "-Dhttpfs.log.dir" \
     "-Dhttpfs.log.dir=${HTTPFS_LOG:-${HADOOP_LOG_DIR}}"
-  hadoop_add_param HADOOP_OPTS "-Dhttpfs.http.hostname" \
-    "-Dhttpfs.http.hostname=${HTTPFS_HOST_NAME:-$(hostname -f)}"
-  if [[ -n "${HTTPFS_SSL_ENABLED}" ]]; then
-    hadoop_add_param HADOOP_OPTS "-Dhttpfs.ssl.enabled" \
-      "-Dhttpfs.ssl.enabled=${HTTPFS_SSL_ENABLED}"
-  fi
 
   if [[ "${HADOOP_DAEMON_MODE}" == "default" ]] ||
      [[ "${HADOOP_DAEMON_MODE}" == "start" ]]; then
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
index 490d77800ad..e4204564c25 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/resources/httpfs-default.xml
@@ -16,7 +16,7 @@
 <configuration>
 
   <property>
-    <name>hadoop.httpfs.http.port</name>
+    <name>httpfs.http.port</name>
     <value>14000</value>
     <description>
       The HTTP port for HttpFS REST API.
@@ -24,7 +24,7 @@
   </property>
 
   <property>
-    <name>hadoop.httpfs.http.host</name>
+    <name>httpfs.http.hostname</name>
     <value>0.0.0.0</value>
     <description>
       The bind host for HttpFS REST API.
@@ -32,7 +32,7 @@
   </property>
 
   <property>
-    <name>hadoop.httpfs.http.administrators</name>
+    <name>httpfs.http.administrators</name>
     <value></value>
     <description>ACL for the admins, this configuration is used to control
       who can access the default servlets for HttpFS server. The value
@@ -46,7 +46,7 @@
   </property>
 
   <property>
-    <name>hadoop.httpfs.ssl.enabled</name>
+    <name>httpfs.ssl.enabled</name>
     <value>false</value>
     <description>
       Whether SSL is enabled. Default is false, i.e. disabled.
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm
index 4db5d96e4d3..91ef90e4197 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/site/markdown/ServerSetup.md.vm
@@ -82,7 +82,7 @@ Enable SSL in `etc/hadoop/httpfs-site.xml`:
 
 ```xml
   <property>
-    <name>hadoop.httpfs.ssl.enabled</name>
+    <name>httpfs.ssl.enabled</name>
     <value>true</value>
     <description>
       Whether SSL is enabled. Default is false, i.e. disabled.
@@ -142,13 +142,14 @@ configuration properties instead.
 
 Environment Variable        | Configuration Property       | Configuration File
 ----------------------------|------------------------------|--------------------
-HTTPFS_TEMP                 | hadoop.http.temp.dir         | httpfs-site.xml
-HTTPFS_HTTP_PORT            | hadoop.httpfs.http.port      | httpfs-site.xml
+HTTPFS_HTTP_HOSTNAME        | httpfs.http.hostname         | httpfs-site.xml
+HTTPFS_HTTP_PORT            | httpfs.http.port             | httpfs-site.xml
 HTTPFS_MAX_HTTP_HEADER_SIZE | hadoop.http.max.request.header.size and hadoop.http.max.response.header.size | httpfs-site.xml
 HTTPFS_MAX_THREADS          | hadoop.http.max.threads      | httpfs-site.xml
-HTTPFS_SSL_ENABLED          | hadoop.httpfs.ssl.enabled    | httpfs-site.xml
+HTTPFS_SSL_ENABLED          | httpfs.ssl.enabled           | httpfs-site.xml
 HTTPFS_SSL_KEYSTORE_FILE    | ssl.server.keystore.location | ssl-server.xml
 HTTPFS_SSL_KEYSTORE_PASS    | ssl.server.keystore.password | ssl-server.xml
+HTTPFS_TEMP                 | hadoop.http.temp.dir         | httpfs-site.xml
 
 HTTP Default Services
 ---------------------
@@ -182,7 +183,7 @@ and `/stacks`, configure the following properties in `httpfs-site.xml`:
 
 ```xml
   <property>
-    <name>hadoop.httpfs.http.administrators</name>
+    <name>httpfs.http.administrators</name>
     <value></value>
     <description>ACL for the admins, this configuration is used to control
       who can access the default servlets for HttpFS server. The value
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java
index 841c4dcf2f1..ddaeedb1b6e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/server/TestHttpFSServerWebServer.java
@@ -63,7 +63,7 @@ public class TestHttpFSServerWebServer {
   @Before
   public void setUp() throws Exception {
     Configuration conf = new Configuration();
-    conf.set(HttpFSServerWebServer.HTTP_HOST_KEY, "localhost");
+    conf.set(HttpFSServerWebServer.HTTP_HOSTNAME_KEY, "localhost");
     conf.setInt(HttpFSServerWebServer.HTTP_PORT_KEY, 0);
     Configuration sslConf = new Configuration();
     webServer = new HttpFSServerWebServer(conf, sslConf);
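
For reference, below is a minimal sketch of reading the renamed keys with the standard `org.apache.hadoop.conf.Configuration` API. It is not part of the patch: the class name is hypothetical, and it assumes `hadoop-common` is on the classpath and that `httpfs-default.xml`/`httpfs-site.xml` are resolvable as classpath resources. The property names and defaults are the ones introduced above.

```java
import org.apache.hadoop.conf.Configuration;

// Illustrative only; the class name is made up for this sketch.
public class HttpFSConfigCheck {
  public static void main(String[] args) {
    // Start from an empty Configuration and add the HttpFS resources.
    Configuration conf = new Configuration(false);
    conf.addResource("httpfs-default.xml");
    conf.addResource("httpfs-site.xml");

    // Property names and defaults as renamed by HDFS-12114.
    String host = conf.get("httpfs.http.hostname", "0.0.0.0");
    int port = conf.getInt("httpfs.http.port", 14000);
    boolean sslEnabled = conf.getBoolean("httpfs.ssl.enabled", false);

    System.out.printf("HttpFS endpoint: %s://%s:%d%n",
        sslEnabled ? "https" : "http", host, port);
  }
}
```

This mirrors what `HttpFSServerWebServer` reads after the change; the deprecated `HTTPFS_HTTP_HOSTNAME` and `HTTPFS_HTTP_PORT` environment variables are mapped onto the same keys through `deprecateEnv`.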