HDFS-5873. Merge change r1564973 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1564977 13f79535-47bb-0310-9956-ffa450edef68
Jing Zhao 2014-02-05 22:53:41 +00:00
parent 1a6bd511f2
commit e7e5df5b0b
6 changed files with 89 additions and 25 deletions


@@ -34,13 +34,14 @@ public class HttpConfig {
     HTTPS_ONLY,
     HTTP_AND_HTTPS;
 
+    private static final Policy[] VALUES = values();
     public static Policy fromString(String value) {
-      if (HTTPS_ONLY.name().equalsIgnoreCase(value)) {
-        return HTTPS_ONLY;
-      } else if (HTTP_AND_HTTPS.name().equalsIgnoreCase(value)) {
-        return HTTP_AND_HTTPS;
+      for (Policy p : VALUES) {
+        if (p.name().equalsIgnoreCase(value)) {
+          return p;
+        }
       }
-      return HTTP_ONLY;
+      return null;
     }
 
     public boolean isHttpEnabled() {
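For context, a minimal sketch of how a caller might use the reworked fromString: matching is case-insensitive over all enum constants, and an unrecognized value now yields null instead of silently falling back to HTTP_ONLY. The class and method names below are illustrative only and are not part of this change.

import org.apache.hadoop.http.HttpConfig;

public class PolicyLookupSketch {
  // Resolve a raw policy string; treat null (unknown value) as a configuration
  // error, which is what DFSUtil#getHttpPolicy does in a later hunk of this commit.
  static HttpConfig.Policy resolve(String raw) {
    HttpConfig.Policy policy = HttpConfig.Policy.fromString(raw);
    if (policy == null) {
      throw new IllegalArgumentException("Unrecognized HTTP policy: " + raw);
    }
    return policy;
  }

  public static void main(String[] args) {
    System.out.println(resolve("https_only"));     // HTTPS_ONLY (case-insensitive match)
    System.out.println(resolve("HTTP_AND_HTTPS")); // HTTP_AND_HTTPS
  }
}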


@@ -352,7 +352,8 @@ Configuration for <<<conf/core-site.xml>>>
 | | | This value is deprecated. Use dfs.http.policy |
 *-------------------------+-------------------------+------------------------+
 | <<<dfs.http.policy>>> | <HTTP_ONLY> or <HTTPS_ONLY> or <HTTP_AND_HTTPS> | |
-| | | HTTPS_ONLY turns off http access |
+| | | HTTPS_ONLY turns off http access. This option takes precedence over |
+| | | the deprecated configuration dfs.https.enable and hadoop.ssl.enabled. |
 *-------------------------+-------------------------+------------------------+
 | <<<dfs.namenode.https-address>>> | <nn_host_fqdn:50470> | |
 *-------------------------+-------------------------+------------------------+
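To illustrate the precedence described in the documentation change above, the following sketch (illustrative only; the class name is hypothetical) resolves the effective policy through DFSUtil.getHttpPolicy as reworked later in this commit. Setting the keys programmatically on a Configuration mirrors setting them in the XML configuration files.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSUtil;

public class PolicyPrecedenceSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);

    // Only the deprecated key is set: the resolved policy is HTTP_AND_HTTPS.
    conf.setBoolean("dfs.https.enable", true);
    System.out.println(DFSUtil.getHttpPolicy(conf)); // HTTP_AND_HTTPS

    // Once dfs.http.policy is set, it takes precedence over dfs.https.enable.
    conf.set("dfs.http.policy", "HTTP_ONLY");
    System.out.println(DFSUtil.getHttpPolicy(conf)); // HTTP_ONLY
  }
}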


@ -586,6 +586,9 @@ Release 2.3.0 - UNRELEASED
HDFS-5876. SecureDataNodeStarter does not pick up configuration in HDFS-5876. SecureDataNodeStarter does not pick up configuration in
hdfs-site.xml. (Haohui Mai via jing9) hdfs-site.xml. (Haohui Mai via jing9)
HDFS-5873. dfs.http.policy should have higher precedence over dfs.https.enable.
(Haohui Mai via jing9)
BREAKDOWN OF HDFS-2832 SUBTASKS AND RELATED JIRAS BREAKDOWN OF HDFS-2832 SUBTASKS AND RELATED JIRAS
HDFS-4985. Add storage type to the protocol and expose it in block report HDFS-4985. Add storage type to the protocol and expose it in block report


@@ -1554,31 +1554,34 @@ public class DFSUtil {
    * configuration settings.
    */
   public static HttpConfig.Policy getHttpPolicy(Configuration conf) {
-    String httpPolicy = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY,
-        DFSConfigKeys.DFS_HTTP_POLICY_DEFAULT);
-
-    HttpConfig.Policy policy = HttpConfig.Policy.fromString(httpPolicy);
-
-    if (policy == HttpConfig.Policy.HTTP_ONLY) {
-      boolean httpsEnabled = conf.getBoolean(
-          DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
+    String policyStr = conf.get(DFSConfigKeys.DFS_HTTP_POLICY_KEY);
+    if (policyStr == null) {
+      boolean https = conf.getBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY,
           DFSConfigKeys.DFS_HTTPS_ENABLE_DEFAULT);
 
-      boolean hadoopSslEnabled = conf.getBoolean(
+      boolean hadoopSsl = conf.getBoolean(
           CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY,
           CommonConfigurationKeys.HADOOP_SSL_ENABLED_DEFAULT);
 
-      if (hadoopSslEnabled) {
+      if (hadoopSsl) {
         LOG.warn(CommonConfigurationKeys.HADOOP_SSL_ENABLED_KEY
-            + " is deprecated. Please use "
-            + DFSConfigKeys.DFS_HTTPS_ENABLE_KEY + ".");
-        policy = HttpConfig.Policy.HTTPS_ONLY;
-      } else if (httpsEnabled) {
-        LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
-            + " is deprecated. Please use "
-            + DFSConfigKeys.DFS_HTTPS_ENABLE_KEY + ".");
-        policy = HttpConfig.Policy.HTTP_AND_HTTPS;
+            + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
+            + ".");
       }
+      if (https) {
+        LOG.warn(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY
+            + " is deprecated. Please use " + DFSConfigKeys.DFS_HTTP_POLICY_KEY
+            + ".");
+      }
+
+      return (hadoopSsl || https) ? HttpConfig.Policy.HTTP_AND_HTTPS
+          : HttpConfig.Policy.HTTP_ONLY;
+    }
+
+    HttpConfig.Policy policy = HttpConfig.Policy.fromString(policyStr);
+    if (policy == null) {
+      throw new HadoopIllegalArgumentException("Unregonized value '"
+          + policyStr + "' for " + DFSConfigKeys.DFS_HTTP_POLICY_KEY);
     }
 
     conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
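A minimal sketch (illustrative only; the class name is hypothetical) of how server-side code can consume the resolved policy. It assumes only what the hunks above show: getHttpPolicy honors the deprecated keys when dfs.http.policy is unset, throws HadoopIllegalArgumentException for unrecognized values, and Policy#isHttpEnabled is defined on the HttpConfig.Policy enum.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.http.HttpConfig;

public class HttpPolicyUsageSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);

    // isHttpEnabled() comes from the HttpConfig hunk earlier in this commit.
    if (policy.isHttpEnabled()) {
      System.out.println("Start the plain HTTP endpoint (policy = " + policy + ")");
    }
    if (policy == HttpConfig.Policy.HTTPS_ONLY
        || policy == HttpConfig.Policy.HTTP_AND_HTTPS) {
      System.out.println("Start the HTTPS endpoint");
    }
  }
}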


@@ -0,0 +1,54 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;

import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_AND_HTTPS;
import static org.apache.hadoop.http.HttpConfig.Policy.HTTP_ONLY;

import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.conf.Configuration;
import org.junit.Assert;
import org.junit.Test;

public final class TestHttpPolicy {

  @Test(expected = HadoopIllegalArgumentException.class)
  public void testInvalidPolicyValue() {
    Configuration conf = new Configuration();
    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, "invalid");
    DFSUtil.getHttpPolicy(conf);
  }

  @Test
  public void testDeprecatedConfiguration() {
    Configuration conf = new Configuration(false);
    Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));

    conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
    Assert.assertSame(HTTP_AND_HTTPS, DFSUtil.getHttpPolicy(conf));

    conf = new Configuration(false);
    conf.setBoolean(DFSConfigKeys.HADOOP_SSL_ENABLED_KEY, true);
    Assert.assertSame(HTTP_AND_HTTPS, DFSUtil.getHttpPolicy(conf));

    conf = new Configuration(false);
    conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, HTTP_ONLY.name());
    conf.setBoolean(DFSConfigKeys.DFS_HTTPS_ENABLE_KEY, true);
    Assert.assertSame(HTTP_ONLY, DFSUtil.getHttpPolicy(conf));
  }
}


@@ -104,9 +104,11 @@ public class TestNameNodeHttpServer {
           server.getHttpsAddress() == null));
     } finally {
-      server.stop();
+      if (server != null) {
+        server.stop();
+      }
     }
   }
 
   private static boolean canAccess(String scheme, InetSocketAddress addr) {
     if (addr == null)