HADOOP-10348. Deprecate hadoop.ssl.configuration in branch-2, and remove it in trunk. Contributed by Haohui Mai.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1570295 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jing Zhao 2014-02-20 18:34:24 +00:00
parent b23f6cc1f2
commit 8b196816d8
5 changed files with 6 additions and 70 deletions

View File

@@ -340,6 +340,9 @@ Release 2.5.0 - UNRELEASED
IMPROVEMENTS
HADOOP-10348. Deprecate hadoop.ssl.configuration in branch-2, and remove
it in trunk. (Haohui Mai via jing9)
OPTIMIZATIONS
BUG FIXES

View File

@@ -268,21 +268,11 @@ public class CommonConfigurationKeysPublic {
public static final String HADOOP_SECURITY_AUTH_TO_LOCAL = public static final String HADOOP_SECURITY_AUTH_TO_LOCAL =
"hadoop.security.auth_to_local"; "hadoop.security.auth_to_local";
public static final String HADOOP_SSL_ENABLED_KEY = "hadoop.ssl.enabled";
public static final boolean HADOOP_SSL_ENABLED_DEFAULT = false;
/** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */ /** See <a href="{@docRoot}/../core-default.html">core-default.xml</a> */
public static final String HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN = public static final String HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN =
"hadoop.kerberos.min.seconds.before.relogin"; "hadoop.kerberos.min.seconds.before.relogin";
/** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */ /** Default value for HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN */
public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT = public static final int HADOOP_KERBEROS_MIN_SECONDS_BEFORE_RELOGIN_DEFAULT =
60; 60;
// HTTP policies to be used in configuration
// Use HttpPolicy.name() instead
@Deprecated
public static final String HTTP_POLICY_HTTP_ONLY = "HTTP_ONLY";
@Deprecated
public static final String HTTP_POLICY_HTTPS_ONLY = "HTTPS_ONLY";
} }

View File

@@ -377,8 +377,6 @@ public class DFSConfigKeys extends CommonConfigurationKeys {
public static final int DFS_NAMENODE_SERVICE_HANDLER_COUNT_DEFAULT = 10; public static final int DFS_NAMENODE_SERVICE_HANDLER_COUNT_DEFAULT = 10;
public static final String DFS_SUPPORT_APPEND_KEY = "dfs.support.append"; public static final String DFS_SUPPORT_APPEND_KEY = "dfs.support.append";
public static final boolean DFS_SUPPORT_APPEND_DEFAULT = true; public static final boolean DFS_SUPPORT_APPEND_DEFAULT = true;
public static final String DFS_HTTPS_ENABLE_KEY = "dfs.https.enable";
public static final boolean DFS_HTTPS_ENABLE_DEFAULT = false;
public static final String DFS_HTTP_POLICY_KEY = "dfs.http.policy"; public static final String DFS_HTTP_POLICY_KEY = "dfs.http.policy";
public static final String DFS_HTTP_POLICY_DEFAULT = HttpConfig.Policy.HTTP_ONLY.name(); public static final String DFS_HTTP_POLICY_DEFAULT = HttpConfig.Policy.HTTP_ONLY.name();
public static final String DFS_DEFAULT_CHUNK_VIEW_SIZE_KEY = "dfs.default.chunk.view.size"; public static final String DFS_DEFAULT_CHUNK_VIEW_SIZE_KEY = "dfs.default.chunk.view.size";

View File

@@ -1553,44 +1553,11 @@ public class DFSUtil {
} }
/** /**
* Get http policy. Http Policy is chosen as follows: * Get http policy.
* <ol>
* <li>If hadoop.ssl.enabled is set, http endpoints are not started. Only
* https endpoints are started on configured https ports</li>
* <li>This configuration is overridden by dfs.https.enable configuration, if
* it is set to true. In that case, both http and https endpoints are stared.</li>
* <li>All the above configurations are overridden by dfs.http.policy
* configuration. With this configuration you can set http-only, https-only
* and http-and-https endpoints.</li>
* </ol>
* See hdfs-default.xml documentation for more details on each of the above
* configuration settings.
*/ */
public static HttpConfig.Policy getHttpPolicy(Configuration conf) { public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY); String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
if (policyStr == null) { DFSConfigKeys.DFS_HTTP_POLICY_DEFAULT);
boolean https = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);
boolean hadoopSsl = conf.getBoolean(
CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);
if (hadoopSsl) {
LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
+ " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
+ ".");
}
if (https) {
LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
+ " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
+ ".");
}
return (hadoopSsl || https) ? HttpConfig.Policy.HTTP_AND_HTTPS
: HttpConfig.Policy.HTTP_ONLY;
}
HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr); HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
if (policy == null) { if (policy == null) {
throw new HadoopIllegalArgumentException("Unregonized value '" throw new HadoopIllegalArgumentException("Unregonized value '"

View File

@@ -17,12 +17,8 @@
*/ */
package org.apache.hadoop.hdfs; package org.apache.hadoop.hdfs;
import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_AND_HTTPS;
import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_ONLY;
import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.Test; import org.junit.Test;
public final class TestHttpPolicy { public final class TestHttpPolicy {
@@ -33,22 +29,4 @@ public final class TestHttpPolicy {
conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, "invalid"); conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, "invalid");
DFSUtil.getHttpPolicy(conf); DFSUtil.getHttpPolicy(conf);
} }
@Test
public void testDeprecatedConfiguration() {
Configuration conf = new Configuration(false);
Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));
conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
Assert.assertSame(HTTP_AND_HTTPS, DFSUtil.getHttpPolicy(conf));
conf = new Configuration(false);
conf.setBoolean(DFSConfigKeys.HADOOP_SSL_ENABLED_KEY, true);
Assert.assertSame(HTTP_AND_HTTPS, DFSUtil.getHttpPolicy(conf));
conf = new Configuration(false);
conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HTTP_ONLY.name());
conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));
}
} }