HDFS-5876. SecureDataNodeStarter does not pick up configuration in hdfs-site.xml. Contributed by Haohui Mai.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1564897 13f79535-47bb-0310-9956-ffa450edef68
parent dcaaeefeaf
commit 69dbf0b225
CHANGES.txt
@@ -869,6 +869,9 @@ Release 2.3.0 - UNRELEASED
     HDFS-5399. Revisit SafeModeException and corresponding retry policies.
     (Jing Zhao via todd)
 
+    HDFS-5876. SecureDataNodeStarter does not pick up configuration in
+    hdfs-site.xml. (Haohui Mai via jing9)
+
   BREAKDOWN OF HDFS-2832 SUBTASKS AND RELATED JIRAS
 
     HDFS-4985. Add storage type to the protocol and expose it in block report
DataNode.java
@@ -362,13 +362,13 @@ public class DataNode extends Configured
         .setConf(conf).setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")));
 
     HttpConfig.Policy policy = DFSUtil.getHttpPolicy(conf);
-    InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
-    String infoHost = infoSocAddr.getHostName();
 
     if (policy.isHttpEnabled()) {
       if (secureResources == null) {
+        InetSocketAddress infoSocAddr = DataNode.getInfoAddr(conf);
         int port = infoSocAddr.getPort();
-        builder.addEndpoint(URI.create("http://" + infoHost + ":" + port));
+        builder.addEndpoint(URI.create("http://"
+            + NetUtils.getHostPortString(infoSocAddr)));
         if (port == 0) {
           builder.setFindPort(true);
         }
@@ -381,7 +381,7 @@ public class DataNode extends Configured
 
     if (policy.isHttpsEnabled()) {
       InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
-          DFS_DATANODE_HTTPS_ADDRESS_KEY, infoHost + ":" + 0));
+          DFS_DATANODE_HTTPS_ADDRESS_KEY, DFS_DATANODE_HTTPS_ADDRESS_DEFAULT));
 
       Configuration sslConf = DFSUtil.loadSslConfiguration(conf);
       DFSUtil.loadSslConfToHttpServerBuilder(builder, sslConf);
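
As a side note on the hunk above, the HTTPS bind address now falls back to the stock DFS_DATANODE_HTTPS_ADDRESS_DEFAULT constant instead of a hand-built "<infoHost>:0" string, so a value configured in hdfs-site.xml (or the shipped default) is used as-is. A minimal standalone sketch of that lookup, outside the DataNode code path; the demo class and main() are made up for illustration:

import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.net.NetUtils;

// Standalone illustration only; not part of this patch.
public class HttpsAddressDemo {
  public static void main(String[] args) {
    Configuration conf = new HdfsConfiguration();
    // Same lookup as the patched code: configured value first, library default second.
    InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf.get(
        DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY,
        DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_DEFAULT));
    System.out.println(NetUtils.getHostPortString(secInfoSocAddr));
  }
}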
@@ -390,7 +390,8 @@ public class DataNode extends Configured
       if (port == 0) {
         builder.setFindPort(true);
       }
-      builder.addEndpoint(URI.create("https://" + infoHost + ":" + port));
+      builder.addEndpoint(URI.create("https://"
+          + NetUtils.getHostPortString(secInfoSocAddr)));
     }
 
     this.infoServer = builder.build();
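
The two endpoint hunks above replace string concatenation of a separately cached host name and port with NetUtils.getHostPortString(...) over the full socket address, so host and port stay paired. A minimal sketch of what the builder now receives; the host name and port below are hypothetical:

import java.net.InetSocketAddress;
import java.net.URI;

import org.apache.hadoop.net.NetUtils;

// Standalone illustration only; not part of this patch.
public class EndpointUriDemo {
  public static void main(String[] args) {
    InetSocketAddress infoSocAddr =
        InetSocketAddress.createUnresolved("datanode.example.com", 50075);
    // Equivalent shape to the URI passed to builder.addEndpoint(...) above.
    URI endpoint = URI.create("http://" + NetUtils.getHostPortString(infoSocAddr));
    System.out.println(endpoint); // http://datanode.example.com:50075
  }
}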
SecureDataNodeStarter.java
@@ -25,6 +25,7 @@ import org.apache.commons.daemon.DaemonContext;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.http.HttpServer2;
@@ -62,7 +63,9 @@ public class SecureDataNodeStarter implements Daemon {
   @Override
   public void init(DaemonContext context) throws Exception {
     System.err.println("Initializing secure datanode resources");
-    Configuration conf = new Configuration();
+    // Create a new HdfsConfiguration object to ensure that the configuration in
+    // hdfs-site.xml is picked up.
+    Configuration conf = new HdfsConfiguration();
 
     // Stash command-line arguments for regular datanode
     args = context.getArguments();
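
The switch from Configuration to HdfsConfiguration is the heart of the fix: HdfsConfiguration registers hdfs-default.xml and hdfs-site.xml as default resources, while a plain Configuration only loads core-default.xml and core-site.xml, which is why the secure starter previously ignored hdfs-site.xml. A minimal standalone sketch of the difference; the demo class is made up and assumes an hdfs-site.xml on the classpath that overrides dfs.datanode.http.address:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;

// Standalone illustration only; not part of this patch.
public class ConfLoadingDemo {
  public static void main(String[] args) {
    Configuration plain = new Configuration();      // core-*.xml only
    Configuration hdfs = new HdfsConfiguration();   // also hdfs-default.xml and hdfs-site.xml
    System.out.println("plain: "
        + plain.get(DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY)); // likely null
    System.out.println("hdfs:  "
        + hdfs.get(DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY));  // value from hdfs-site.xml
  }
}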
TestNameNodeHttpServer.java
@@ -85,6 +85,7 @@ public class TestNameNodeHttpServer {
   @Test
   public void testHttpPolicy() throws Exception {
     conf.set(DFSConfigKeys.DFS_HTTP_POLICY_KEY, policy.name());
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0");
 
     InetSocketAddress addr = InetSocketAddress.createUnresolved("localhost", 0);
     NameNodeHttpServer server = null;
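
The test hunk pins dfs.namenode.https-address to "localhost:0", i.e. an ephemeral port, so concurrent test runs do not collide on a fixed port. A tiny sketch of the port-0 convention using plain java.net rather than the HDFS test API:

import java.net.ServerSocket;

// Standalone illustration only: port 0 asks the OS for any free port.
public class EphemeralPortDemo {
  public static void main(String[] args) throws Exception {
    try (ServerSocket socket = new ServerSocket(0)) {
      System.out.println("bound to free port " + socket.getLocalPort());
    }
  }
}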