diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 11d4e2a0d4c..1b97fefd926 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -2547,14 +2547,6 @@
   </description>
 </property>

-<property>
-  <name>hadoop.ssl.enabled</name>
-  <value>false</value>
-  <description>
-    Deprecated. Use dfs.http.policy and yarn.http.policy instead.
-  </description>
-</property>
-
 <property>
   <name>hadoop.ssl.enabled.protocols</name>
   <value>TLSv1,SSLv2Hello,TLSv1.1,TLSv1.2</value>
diff --git a/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md b/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
index 856861f29e3..c36027794a2 100644
--- a/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
+++ b/hadoop-common-project/hadoop-common/src/site/markdown/SecureMode.md
@@ -267,9 +267,8 @@ The following settings allow configuring SSL access to the NameNode web UI (opti

 | Parameter | Value | Notes |
 |:-----------------------------|:------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
-| `dfs.http.policy` | `HTTP_ONLY` or `HTTPS_ONLY` or `HTTP_AND_HTTPS` | `HTTPS_ONLY` turns off http access. This option takes precedence over the deprecated configuration dfs.https.enable and hadoop.ssl.enabled. If using SASL to authenticate data transfer protocol instead of running DataNode as root and using privileged ports, then this property must be set to `HTTPS_ONLY` to guarantee authentication of HTTP servers. (See `dfs.data.transfer.protection`.) |
+| `dfs.http.policy` | `HTTP_ONLY` or `HTTPS_ONLY` or `HTTP_AND_HTTPS` | `HTTPS_ONLY` turns off http access. If using SASL to authenticate data transfer protocol instead of running DataNode as root and using privileged ports, then this property must be set to `HTTPS_ONLY` to guarantee authentication of HTTP servers. (See `dfs.data.transfer.protection`.) |
 | `dfs.namenode.https-address` | `0.0.0.0:9871` | This parameter is used in non-HA mode and without federation. See [HDFS High Availability](../hadoop-hdfs/HDFSHighAvailabilityWithNFS.html#Deployment) and [HDFS Federation](../hadoop-hdfs/Federation.html#Federation_Configuration) for details. |
-| `dfs.https.enable` | `true` | This value is deprecated. `Use dfs.http.policy` |

 ### Secondary NameNode

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
index 04b6db6f766..beb35f758a3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestCommonConfigurationFields.java
@@ -211,8 +211,6 @@ public class TestCommonConfigurationFields extends TestConfigurationFieldsBase {
     // - org.apache.hadoop.net.NetUtils
     xmlPropsToSkipCompare
         .add("hadoop.rpc.socket.factory.class.ClientProtocol");
-    // - Where is this used?
-    xmlPropsToSkipCompare.add("hadoop.ssl.enabled");

     // Keys with no corresponding variable
     // - org.apache.hadoop.io.compress.bzip2.Bzip2Factory
diff --git a/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json b/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
index c537195f6a6..5d79761e320 100644
--- a/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
+++ b/hadoop-tools/hadoop-sls/src/main/data/2jobs2min-rumen-jh.json
@@ -4560,7 +4560,6 @@
     "hadoop.hdfs.configuration.version" : "1",
     "dfs.datanode.balance.bandwidthPerSec" : "1048576",
     "mapreduce.reduce.shuffle.connect.timeout" : "180000",
-    "hadoop.ssl.enabled" : "false",
     "dfs.journalnode.rpc-address" : "0.0.0.0:8485",
     "yarn.nodemanager.aux-services" : "mapreduce.shuffle",
     "mapreduce.job.counters.max" : "120",
@@ -9641,7 +9640,6 @@
     "hadoop.hdfs.configuration.version" : "1",
     "dfs.datanode.balance.bandwidthPerSec" : "1048576",
     "mapreduce.reduce.shuffle.connect.timeout" : "180000",
-    "hadoop.ssl.enabled" : "false",
     "dfs.journalnode.rpc-address" : "0.0.0.0:8485",
     "yarn.nodemanager.aux-services" : "mapreduce.shuffle",
     "mapreduce.job.counters.max" : "120",