From 3651feb49d443d099b41eb8fd493a42ab623b2f4 Mon Sep 17 00:00:00 2001
From: Haohui Mai
Date: Thu, 11 Jun 2015 18:53:29 -0700
Subject: [PATCH] HDFS-8572. DN always uses HTTP/localhost@REALM principals in
 SPNEGO. Contributed by Haohui Mai.

---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt    |  3 ++
 .../hadoop/hdfs/server/datanode/DataNode.java  | 46 +++-------------
 .../datanode/web/DatanodeHttpServer.java       | 54 ++++++++++++++++++-
 3 files changed, 62 insertions(+), 41 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 97ca7fe7186..2d04e9b757d 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -113,6 +113,9 @@ Release 2.7.1 - UNRELEASED
     HDFS-8583. Document that NFS gateway does not work with rpcbind
     on SLES 11. (Arpit Agarwal)
 
+    HDFS-8572. DN always uses HTTP/localhost@REALM principals in SPNEGO.
+    (wheat9)
+
 Release 2.7.0 - 2015-04-20
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
index 2ff1be39c92..8bb807b5fe9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
-import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN;
+
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_ADDRESS_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_ADDRESS_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
@@ -154,15 +154,12 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.NodeType;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.ReplicaState;
 import org.apache.hadoop.hdfs.server.common.HdfsServerConstants.StartupOption;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 import org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter.SecureResources;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
 import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
-import org.apache.hadoop.hdfs.server.namenode.FileChecksumServlets;
-import org.apache.hadoop.hdfs.server.namenode.StreamFile;
 import org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer;
 import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeProtocol;
@@ -171,7 +168,6 @@ import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
 import org.apache.hadoop.http.HttpConfig;
-import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.ReadaheadPool;
 import org.apache.hadoop.io.nativeio.NativeIO;
@@ -188,7 +184,6 @@ import org.apache.hadoop.security.SaslPropertiesResolver;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod;
-import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.tracing.TraceAdminPB.TraceAdminService;
@@ -303,7 +298,6 @@ public class DataNode extends ReconfigurableBase
   private volatile boolean heartbeatsDisabledForTests = false;
 
   private DataStorage storage = null;
-  private HttpServer2 infoServer = null;
   private DatanodeHttpServer httpServer = null;
   private int infoPort;
   private int infoSecurePort;
@@ -757,33 +751,12 @@ public class DataNode extends ReconfigurableBase
    */
   private void startInfoServer(Configuration conf)
     throws IOException {
-    Configuration confForInfoServer = new Configuration(conf);
-    confForInfoServer.setInt(HttpServer2.HTTP_MAX_THREADS, 10);
-    HttpServer2.Builder builder = new HttpServer2.Builder()
-      .setName("datanode")
-      .setConf(conf).setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")))
-      .addEndpoint(URI.create("http://localhost:0"))
-      .setFindPort(true);
-
-    this.infoServer = builder.build();
-
-    this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class);
-    this.infoServer.addInternalServlet(null, "/getFileChecksum/*",
-        FileChecksumServlets.GetServlet.class);
-
-    this.infoServer.setAttribute("datanode", this);
-    this.infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-    this.infoServer.addServlet(null, "/blockScannerReport",
-        BlockScanner.Servlet.class);
-
-    this.infoServer.start();
-    InetSocketAddress jettyAddr = infoServer.getConnectorAddress(0);
-
     // SecureDataNodeStarter will bind the privileged port to the channel if
     // the DN is started by JSVC, pass it along.
     ServerSocketChannel httpServerChannel = secureResources != null ?
-        secureResources.getHttpServerChannel() : null;
-    this.httpServer = new DatanodeHttpServer(conf, jettyAddr, httpServerChannel);
+        secureResources.getHttpServerChannel() : null;
+
+    this.httpServer = new DatanodeHttpServer(conf, httpServerChannel);
     httpServer.start();
     if (httpServer.getHttpAddress() != null) {
       infoPort = httpServer.getHttpAddress().getPort();
@@ -1704,18 +1677,13 @@ public class DataNode extends ReconfigurableBase
     shutdownPeriodicScanners();
 
     // Stop the web server
-    if (infoServer != null) {
+    if (httpServer != null) {
       try {
-        infoServer.stop();
+        httpServer.close();
       } catch (Exception e) {
-        LOG.warn("Exception shutting down DataNode", e);
+        LOG.warn("Exception shutting down DataNode HttpServer", e);
       }
     }
-    try {
-      httpServer.close();
-    } catch (Exception e) {
-      LOG.warn("Exception shutting down DataNode HttpServer", e);
-    }
 
     if (pauseMonitor != null) {
       pauseMonitor.stop();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
index b620ba6ac10..4cb0571d7f8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/DatanodeHttpServer.java
@@ -35,22 +35,32 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtil;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.hdfs.server.datanode.BlockScanner;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
+import org.apache.hadoop.hdfs.server.namenode.FileChecksumServlets;
+import org.apache.hadoop.hdfs.server.namenode.StreamFile;
 import org.apache.hadoop.http.HttpConfig;
+import org.apache.hadoop.http.HttpServer2;
 import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.ssl.SSLFactory;
 
 import java.io.Closeable;
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.SocketAddress;
+import java.net.URI;
 import java.nio.channels.ServerSocketChannel;
 import java.security.GeneralSecurityException;
 
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_ADMIN;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_DEFAULT;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY;
+import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY;
 
 public class DatanodeHttpServer implements Closeable {
+  private final HttpServer2 infoServer;
   private final EventLoopGroup bossGroup;
   private final EventLoopGroup workerGroup;
   private final ServerSocketChannel externalHttpChannel;
@@ -64,10 +74,35 @@ public class DatanodeHttpServer implements Closeable {
 
   static final Log LOG = LogFactory.getLog(DatanodeHttpServer.class);
 
-  public DatanodeHttpServer(final Configuration conf, final InetSocketAddress
-    jettyAddr, final ServerSocketChannel externalHttpChannel)
+  public DatanodeHttpServer(final Configuration conf,
+      final ServerSocketChannel externalHttpChannel)
     throws IOException {
     this.conf = conf;
+
+    Configuration confForInfoServer = new Configuration(conf);
+    confForInfoServer.setInt(HttpServer2.HTTP_MAX_THREADS, 10);
+    HttpServer2.Builder builder = new HttpServer2.Builder()
+        .setName("datanode")
+        .setConf(confForInfoServer)
+        .setACL(new AccessControlList(conf.get(DFS_ADMIN, " ")))
+        .hostName(getHostnameForSpnegoPrincipal(confForInfoServer))
+        .addEndpoint(URI.create("http://localhost:0"))
+        .setFindPort(true);
+
+    this.infoServer = builder.build();
+
+    this.infoServer.addInternalServlet(null, "/streamFile/*", StreamFile.class);
+    this.infoServer.addInternalServlet(null, "/getFileChecksum/*",
+        FileChecksumServlets.GetServlet.class);
+
+    this.infoServer.setAttribute("datanode", this);
+    this.infoServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+    this.infoServer.addServlet(null, "/blockScannerReport",
+        BlockScanner.Servlet.class);
+
+    this.infoServer.start();
+    final InetSocketAddress jettyAddr = infoServer.getConnectorAddress(0);
+
     this.confForCreate = new Configuration(conf);
     confForCreate.set(FsPermission.UMASK_LABEL, "000");
 
@@ -170,5 +205,20 @@ public class DatanodeHttpServer implements Closeable {
     if (externalHttpChannel != null) {
       externalHttpChannel.close();
     }
+    try {
+      infoServer.stop();
+    } catch (Exception e) {
+      throw new IOException(e);
+    }
   }
+
+  private static String getHostnameForSpnegoPrincipal(Configuration conf) {
+    String addr = conf.getTrimmed(DFS_DATANODE_HTTP_ADDRESS_KEY, null);
+    if (addr == null) {
+      addr = conf.getTrimmed(DFS_DATANODE_HTTPS_ADDRESS_KEY,
+          DFS_DATANODE_HTTPS_ADDRESS_DEFAULT);
+    }
+    InetSocketAddress inetSocketAddr = NetUtils.createSocketAddr(addr);
+    return inetSocketAddr.getHostString();
+  }
 }
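
Notes (not part of the commit):

The root cause: DataNode built its HttpServer2 with only the
http://localhost:0 endpoint and never set hostName() on the builder, so the
SPNEGO filter resolved the _HOST placeholder in the HTTP/_HOST@REALM
principal to "localhost" regardless of the configured
dfs.datanode.http.address. Moving the HttpServer2 setup into
DatanodeHttpServer and passing getHostnameForSpnegoPrincipal(conf) to
builder.hostName(...) makes the filter substitute the configured hostname
instead.

Below is a minimal sketch of the hostname resolution the new helper performs,
runnable against a Hadoop client classpath; the class name SpnegoHostnameCheck
and the address dn1.example.com:50075 are invented for illustration:

  import java.net.InetSocketAddress;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.hdfs.DFSConfigKeys;
  import org.apache.hadoop.net.NetUtils;

  public class SpnegoHostnameCheck {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // Hostname configured for the DN web UI; its host part should feed
      // the SPNEGO principal.
      conf.set(DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY,
          "dn1.example.com:50075");

      // Mirror the fallback order of getHostnameForSpnegoPrincipal():
      // prefer the HTTP address, then fall back to the HTTPS address.
      String addr = conf.getTrimmed(
          DFSConfigKeys.DFS_DATANODE_HTTP_ADDRESS_KEY, null);
      if (addr == null) {
        addr = conf.getTrimmed(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY,
            DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_DEFAULT);
      }

      // getHostString() returns the configured host without a reverse DNS
      // lookup, so the principal becomes HTTP/dn1.example.com@REALM rather
      // than HTTP/localhost@REALM.
      InetSocketAddress sockAddr = NetUtils.createSocketAddr(addr);
      System.out.println(sockAddr.getHostString());  // dn1.example.com
    }
  }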