HDFS-2180. Refactor NameNode HTTP server into new class. Contributed by Todd Lipcon.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1150960 13f79535-47bb-0310-9956-ffa450edef68
Todd Lipcon 2011-07-26 00:04:30 +00:00
parent d2b31fe25f
commit 01cd616d17
23 changed files with 267 additions and 170 deletions
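
In short: servlet and JSP code that previously pulled NameNode state out of raw, string-keyed ServletContext attributes ("name.node", "name.node.address", "name.system.image") now goes through typed static accessors on the new NameNodeHttpServer class. A minimal sketch of the before/after pattern — the servlet class here is hypothetical; the accessors are the ones added in this commit:

```java
// Illustrative sketch, not part of the commit: MyServlet is a hypothetical
// servlet showing the old cast-based lookup next to the new typed accessors.
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServlet;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;

public class MyServlet extends HttpServlet {
  void resolveNameNode() {
    ServletContext context = getServletContext();
    // Before this commit: unchecked casts against magic attribute strings.
    NameNode viaCast = (NameNode) context.getAttribute("name.node");
    // After: one place (NameNodeHttpServer) owns both the keys and the casts.
    NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
    Configuration conf = NameNodeHttpServer.getConfFromContext(context);
  }
}
```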


@@ -599,6 +599,8 @@ Trunk (unreleased changes)
     HDFS-2144. If SNN shuts down during initialization it does not log the
     cause. (Ravi Prakash via atm)
 
+    HDFS-2180. Refactor NameNode HTTP server into new class. (todd)
+
   OPTIMIZATIONS
 
     HDFS-1458. Improve checkpoint performance by avoiding unnecessary image


@@ -33,6 +33,7 @@
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 
 /**
  * A HTTPS/SSL proxy to HDFS, implementing certificate based access control.
@@ -70,7 +71,7 @@ private void initialize(Configuration conf) throws IOException {
     this.server = new ProxyHttpServer(sslAddr, sslConf);
     this.server.setAttribute("proxy.https.port", server.getPort());
-    this.server.setAttribute(NameNode.NAMENODE_ADDRESS_ATTRIBUTE_KEY, nnAddr);
+    this.server.setAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY, nnAddr);
     this.server.setAttribute(JspHelper.CURRENT_CONF, new HdfsConfiguration());
     this.server.addGlobalFilter("ProxyFilter", ProxyFilter.class.getName(), null);
     this.server.addServlet("listPaths", "/listPaths/*", ProxyListPathsServlet.class);


@@ -30,6 +30,7 @@
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.FileDataServlet;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /** {@inheritDoc} */
@@ -47,7 +48,7 @@ protected URI createUri(String parent, HdfsFileStatus i, UserGroupInformation ug
       dtParam=JspHelper.getDelegationTokenUrlParam(dt);
     }
     InetSocketAddress nnAddress = (InetSocketAddress) getServletContext()
-        .getAttribute(NameNode.NAMENODE_ADDRESS_ATTRIBUTE_KEY);
+        .getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY);
     String nnHostPort = nnAddress == null ? null : NameNode
         .getHostPortString(nnAddress);
     String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS,


@@ -51,7 +51,7 @@
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.net.NetUtils;
@@ -487,8 +487,8 @@ private static String getNNServiceAddress(ServletContext context,
     if (namenodeAddressInUrl != null) {
       namenodeAddress = DFSUtil.getSocketAddress(namenodeAddressInUrl);
     } else if (context != null) {
-      namenodeAddress = (InetSocketAddress) context
-          .getAttribute(NameNode.NAMENODE_ADDRESS_ATTRIBUTE_KEY);
+      namenodeAddress = NameNodeHttpServer.getNameNodeAddressFromContext(
+          context);
     }
     if (namenodeAddress != null) {
       return (namenodeAddress.getAddress().getHostAddress() + ":"


@@ -111,10 +111,10 @@ protected InetSocketAddress getHttpServerAddress(Configuration conf) {
     String addr = conf.get(BN_HTTP_ADDRESS_NAME_KEY, BN_HTTP_ADDRESS_DEFAULT);
     return NetUtils.createSocketAddr(addr);
   }
 
   @Override // NameNode
   protected void setHttpServerAddress(Configuration conf){
-    conf.set(BN_HTTP_ADDRESS_NAME_KEY, getHostPortString(httpAddress));
+    conf.set(BN_HTTP_ADDRESS_NAME_KEY, getHostPortString(getHttpAddress()));
   }
 
   @Override // NameNode


@@ -28,7 +28,6 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -46,8 +45,7 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res
       throws ServletException, IOException {
     final UserGroupInformation ugi;
     final ServletContext context = getServletContext();
-    final Configuration conf =
-        (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
+    final Configuration conf = NameNodeHttpServer.getConfFromContext(context);
     try {
       ugi = getUGI(req, conf);
     } catch(IOException ioe) {
@@ -57,7 +55,8 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res
           "Unable to identify or authenticate user");
       return;
     }
-    final NameNode nn = (NameNode) context.getAttribute("name.node");
+    final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(
+        context);
     String tokenString = req.getParameter(TOKEN);
     if (tokenString == null) {
       resp.sendError(HttpServletResponse.SC_MULTIPLE_CHOICES,


@@ -101,11 +101,6 @@ private void initialize(Configuration conf) throws IOException {
     String fullInfoAddr = conf.get(DFS_NAMENODE_BACKUP_HTTP_ADDRESS_KEY,
                                    DFS_NAMENODE_BACKUP_HTTP_ADDRESS_DEFAULT);
     infoBindAddress = fullInfoAddr.substring(0, fullInfoAddr.indexOf(":"));
-    HttpServer httpServer = backupNode.httpServer;
-    httpServer.setAttribute("name.system.image", getFSImage());
-    httpServer.setAttribute("name.conf", conf);
-    httpServer.addInternalServlet("getimage", "/getimage", GetImageServlet.class);
-
     LOG.info("Checkpoint Period : " + checkpointPeriod + " secs " +
              "(" + checkpointPeriod/60 + " min)");


@@ -75,13 +75,14 @@ protected ClientProtocol createNameNodeProxy() throws IOException {
     ServletContext context = getServletContext();
     // if we are running in the Name Node, use it directly rather than via
     // rpc
-    NameNode nn = (NameNode) context.getAttribute("name.node");
+    NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
     if (nn != null) {
       return nn;
     }
-    InetSocketAddress nnAddr = (InetSocketAddress)context.getAttribute("name.node.address");
+    InetSocketAddress nnAddr =
+        NameNodeHttpServer.getNameNodeAddressFromContext(context);
     Configuration conf = new HdfsConfiguration(
-        (Configuration)context.getAttribute(JspHelper.CURRENT_CONF));
+        NameNodeHttpServer.getConfFromContext(context));
     return DFSUtil.createNamenode(nnAddr, conf);
   }


@@ -37,7 +37,6 @@
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
 import org.apache.hadoop.net.NetUtils;
@@ -57,10 +56,10 @@ public static class RedirectServlet extends DfsServlet {
     public void doGet(HttpServletRequest request, HttpServletResponse response
         ) throws ServletException, IOException {
       final ServletContext context = getServletContext();
-      final Configuration conf =
-          (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
+      final Configuration conf = NameNodeHttpServer.getConfFromContext(context);
       final UserGroupInformation ugi = getUGI(request, conf);
-      final NameNode namenode = (NameNode)context.getAttribute("name.node");
+      final NameNode namenode = NameNodeHttpServer.getNameNodeFromContext(
+          context);
       final DatanodeID datanode = NamenodeJspHelper.getRandomDatanode(namenode);
       try {
         final URI uri = createRedirectUri("/getFileChecksum", ugi, datanode,


@@ -65,7 +65,8 @@ protected URI createUri(String parent, HdfsFileStatus i, UserGroupInformation ug
     }
 
     // Add namenode address to the url params
-    NameNode nn = (NameNode)getServletContext().getAttribute("name.node");
+    NameNode nn = NameNodeHttpServer.getNameNodeFromContext(
+        getServletContext());
     String addr = NameNode.getHostPortString(nn.getNameNodeAddress());
     String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
@@ -85,7 +86,8 @@ private DatanodeID pickSrcDatanode(LocatedBlocks blks, HdfsFileStatus i)
       throws IOException {
     if (i.getLen() == 0 || blks.getLocatedBlocks().size() <= 0) {
       // pick a random datanode
-      NameNode nn = (NameNode)getServletContext().getAttribute("name.node");
+      NameNode nn = NameNodeHttpServer.getNameNodeFromContext(
+          getServletContext());
       return NamenodeJspHelper.getRandomDatanode(nn);
     }
     return JspHelper.bestNode(blks);
@@ -101,8 +103,8 @@ private DatanodeID pickSrcDatanode(LocatedBlocks blks, HdfsFileStatus i)
   public void doGet(final HttpServletRequest request,
       final HttpServletResponse response)
       throws IOException {
-    final Configuration conf =
-        (Configuration) getServletContext().getAttribute(JspHelper.CURRENT_CONF);
+    final Configuration conf = NameNodeHttpServer.getConfFromContext(
+        getServletContext());
     final UserGroupInformation ugi = getUGI(request, conf);
     try {


@@ -30,7 +30,6 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.protocol.FSConstants.DatanodeReportType;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /**
@@ -49,17 +48,15 @@ public void doGet(HttpServletRequest request, HttpServletResponse response
     final PrintWriter out = response.getWriter();
     final InetAddress remoteAddress =
         InetAddress.getByName(request.getRemoteAddr());
-    final Configuration conf =
-        (Configuration) getServletContext().getAttribute(JspHelper.CURRENT_CONF);
+    final ServletContext context = getServletContext();
+    final Configuration conf = NameNodeHttpServer.getConfFromContext(context);
 
     final UserGroupInformation ugi = getUGI(request, conf);
     try {
       ugi.doAs(new PrivilegedExceptionAction<Object>() {
         @Override
         public Object run() throws Exception {
-          final ServletContext context = getServletContext();
-          NameNode nn = (NameNode) context.getAttribute("name.node");
+          NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
 
           final FSNamesystem namesystem = nn.getNamesystem();
           final int totalDatanodes =


@@ -29,7 +29,6 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.Credentials;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -49,8 +48,7 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res
       throws ServletException, IOException {
     final UserGroupInformation ugi;
     final ServletContext context = getServletContext();
-    final Configuration conf =
-        (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
+    final Configuration conf = NameNodeHttpServer.getConfFromContext(context);
     try {
       ugi = getUGI(req, conf);
     } catch(IOException ioe) {
@@ -61,7 +59,7 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res
       return;
     }
     LOG.info("Sending token: {" + ugi.getUserName() + "," + req.getRemoteAddr() +"}");
-    final NameNode nn = (NameNode) context.getAttribute("name.node");
+    final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
     String renewer = req.getParameter(RENEWER);
     final String renewerFinal = (renewer == null) ?
         req.getUserPrincipal().getName() : renewer;


@@ -57,7 +57,7 @@ public void doGet(final HttpServletRequest request,
     Map<String,String[]> pmap = request.getParameterMap();
     try {
       ServletContext context = getServletContext();
-      final FSImage nnImage = (FSImage)context.getAttribute("name.system.image");
+      final FSImage nnImage = NameNodeHttpServer.getFsImageFromContext(context);
       final TransferFsImage ff = new TransferFsImage(pmap, request, response);
       final Configuration conf =
           (Configuration)getServletContext().getAttribute(JspHelper.CURRENT_CONF);


@@ -21,7 +21,6 @@
 import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.net.URI;
-import java.security.PrivilegedExceptionAction;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.HashMap;
@@ -67,7 +66,6 @@
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.NamenodeRole;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants.StartupOption;
 import org.apache.hadoop.hdfs.server.common.IncorrectVersionException;
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.common.UpgradeStatusReport;
 import org.apache.hadoop.hdfs.server.namenode.metrics.NameNodeMetrics;
 import org.apache.hadoop.hdfs.server.protocol.BlocksWithLocations;
@@ -81,7 +79,6 @@
 import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
 import org.apache.hadoop.hdfs.server.protocol.NodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.UpgradeCommand;
-import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.ipc.ProtocolSignature;
@@ -95,7 +92,6 @@
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.security.authorize.AccessControlList;
 import org.apache.hadoop.security.authorize.AuthorizationException;
 import org.apache.hadoop.security.authorize.ProxyUsers;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
@@ -208,8 +204,6 @@ public ProtocolSignature getProtocolSignature(String protocol,
   public static final Log LOG = LogFactory.getLog(NameNode.class.getName());
   public static final Log stateChangeLog = LogFactory.getLog("org.apache.hadoop.hdfs.StateChange");
-  public static final String NAMENODE_ADDRESS_ATTRIBUTE_KEY = "name.node.address";
-
   protected FSNamesystem namesystem;
   protected NamenodeRole role;
 
   /** RPC server. Package-protected for use in tests. */
@@ -225,9 +219,7 @@ public ProtocolSignature getProtocolSignature(String protocol,
   /** RPC server for DN address */
   protected InetSocketAddress serviceRPCAddress = null;
   /** httpServer */
-  protected HttpServer httpServer;
-  /** HTTP server address */
-  protected InetSocketAddress httpAddress = null;
+  protected NameNodeHttpServer httpServer;
   private Thread emptier;
   /** only used for testing purposes */
   protected boolean stopRequested = false;
@@ -372,9 +364,10 @@ protected InetSocketAddress getHttpServerAddress(Configuration conf) {
     return NetUtils.createSocketAddr(
         conf.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, "0.0.0.0:50070"));
   }
 
-  protected void setHttpServerAddress(Configuration conf){
-    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY, getHostPortString(httpAddress));
+  protected void setHttpServerAddress(Configuration conf) {
+    conf.set(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY,
+        getHostPortString(getHttpAddress()));
   }
 
   protected void loadNamesystem(Configuration conf) throws IOException {
@@ -388,11 +381,20 @@ NamenodeRegistration getRegistration() {
   NamenodeRegistration setRegistration() {
     nodeRegistration = new NamenodeRegistration(
         getHostPortString(rpcAddress),
-        getHostPortString(httpAddress),
+        getHostPortString(getHttpAddress()),
         getFSImage().getStorage(), getRole(), getFSImage().getStorage().getCheckpointTime());
     return nodeRegistration;
   }
 
+  /**
+   * Login as the configured user for the NameNode.
+   */
+  void loginAsNameNodeUser(Configuration conf) throws IOException {
+    InetSocketAddress socAddr = getRpcServerAddress(conf);
+    SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
+        DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, socAddr.getHostName());
+  }
+
   /**
    * Initialize name-node.
    *
@@ -401,8 +403,7 @@ NamenodeRegistration setRegistration() {
   protected void initialize(Configuration conf) throws IOException {
     InetSocketAddress socAddr = getRpcServerAddress(conf);
     UserGroupInformation.setConfiguration(conf);
-    SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
-        DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, socAddr.getHostName());
+    loginAsNameNodeUser(conf);
     int handlerCount =
         conf.getInt(DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_KEY,
                     DFSConfigKeys.DFS_DATANODE_HANDLER_COUNT_DEFAULT);
@@ -514,108 +515,9 @@ private void startTrashEmptier(Configuration conf) throws IOException {
   }
 
   private void startHttpServer(final Configuration conf) throws IOException {
-    final InetSocketAddress infoSocAddr = getHttpServerAddress(conf);
-    final String infoHost = infoSocAddr.getHostName();
-    if(UserGroupInformation.isSecurityEnabled()) {
-      String httpsUser = SecurityUtil.getServerPrincipal(conf
-          .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), infoHost);
-      if (httpsUser == null) {
-        LOG.warn(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY
-            + " not defined in config. Starting http server as "
-            + SecurityUtil.getServerPrincipal(conf
-                .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), rpcAddress
-                .getHostName())
-            + ": Kerberized SSL may be not function correctly.");
-      } else {
-        // Kerberized SSL servers must be run from the host principal...
-        LOG.info("Logging in as " + httpsUser + " to start http server.");
-        SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
-            DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY, infoHost);
-      }
-    }
-    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
-    try {
-      this.httpServer = ugi.doAs(new PrivilegedExceptionAction<HttpServer>() {
-        @Override
-        public HttpServer run() throws IOException, InterruptedException {
-          int infoPort = infoSocAddr.getPort();
-          httpServer = new HttpServer("hdfs", infoHost, infoPort,
-              infoPort == 0, conf,
-              new AccessControlList(conf.get(DFSConfigKeys.DFS_ADMIN, " ")));
-          boolean certSSL = conf.getBoolean("dfs.https.enable", false);
-          boolean useKrb = UserGroupInformation.isSecurityEnabled();
-          if (certSSL || useKrb) {
-            boolean needClientAuth = conf.getBoolean(
-                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
-                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
-            InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf
-                .get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
-                    DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
-            Configuration sslConf = new HdfsConfiguration(false);
-            if (certSSL) {
-              sslConf.addResource(conf.get(
-                  "dfs.https.server.keystore.resource", "ssl-server.xml"));
-            }
-            httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth,
-                useKrb);
-            // assume same ssl port for all datanodes
-            InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf
-                .get("dfs.datanode.https.address", infoHost + ":" + 50475));
-            httpServer.setAttribute("datanode.https.port", datanodeSslPort
-                .getPort());
-          }
-          httpServer.setAttribute("name.node", NameNode.this);
-          httpServer.setAttribute(NAMENODE_ADDRESS_ATTRIBUTE_KEY,
-              getNameNodeAddress());
-          httpServer.setAttribute("name.system.image", getFSImage());
-          httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
-          httpServer.addInternalServlet("getDelegationToken",
-              GetDelegationTokenServlet.PATH_SPEC,
-              GetDelegationTokenServlet.class, true);
-          httpServer.addInternalServlet("renewDelegationToken",
-              RenewDelegationTokenServlet.PATH_SPEC,
-              RenewDelegationTokenServlet.class, true);
-          httpServer.addInternalServlet("cancelDelegationToken",
-              CancelDelegationTokenServlet.PATH_SPEC,
-              CancelDelegationTokenServlet.class, true);
-          httpServer.addInternalServlet("fsck", "/fsck", FsckServlet.class,
-              true);
-          httpServer.addInternalServlet("getimage", "/getimage",
-              GetImageServlet.class, true);
-          httpServer.addInternalServlet("listPaths", "/listPaths/*",
-              ListPathsServlet.class, false);
-          httpServer.addInternalServlet("data", "/data/*",
-              FileDataServlet.class, false);
-          httpServer.addInternalServlet("checksum", "/fileChecksum/*",
-              FileChecksumServlets.RedirectServlet.class, false);
-          httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
-              ContentSummaryServlet.class, false);
-          httpServer.start();
-          // The web-server port can be ephemeral... ensure we have the correct
-          // info
-          infoPort = httpServer.getPort();
-          httpAddress = new InetSocketAddress(infoHost, infoPort);
-          setHttpServerAddress(conf);
-          LOG.info(getRole() + " Web-server up at: " + httpAddress);
-          return httpServer;
-        }
-      });
-    } catch (InterruptedException e) {
-      throw new IOException(e);
-    } finally {
-      if(UserGroupInformation.isSecurityEnabled() &&
-          conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY) != null) {
-        // Go back to being the correct Namenode principal
-        LOG.info("Logging back in as "
-            + SecurityUtil.getServerPrincipal(conf
-                .get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY), rpcAddress
-                .getHostName()) + " following http server start.");
-        SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
-            DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY, rpcAddress.getHostName());
-      }
-    }
+    httpServer = new NameNodeHttpServer(conf, this, getHttpServerAddress(conf));
+    httpServer.start();
+    setHttpServerAddress(conf);
   }
 
   /**
@@ -1420,7 +1322,7 @@ public InetSocketAddress getServiceRpcAddress() {
    * @return the http address.
    */
   public InetSocketAddress getHttpAddress() {
-    return httpAddress;
+    return httpServer.getHttpAddress();
   }
 
   /**


@@ -0,0 +1,201 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.server.namenode;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.security.PrivilegedExceptionAction;
+
+import javax.servlet.ServletContext;
+
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+/**
+ * Encapsulates the HTTP server started by the NameNode.
+ */
+@InterfaceAudience.Private
+public class NameNodeHttpServer {
+  private HttpServer httpServer;
+  private final Configuration conf;
+  private final NameNode nn;
+
+  private final Log LOG = NameNode.LOG;
+  private InetSocketAddress httpAddress;
+  private InetSocketAddress bindAddress;
+
+  public static final String NAMENODE_ADDRESS_ATTRIBUTE_KEY = "name.node.address";
+  public static final String FSIMAGE_ATTRIBUTE_KEY = "name.system.image";
+  protected static final String NAMENODE_ATTRIBUTE_KEY = "name.node";
+
+  public NameNodeHttpServer(
+      Configuration conf,
+      NameNode nn,
+      InetSocketAddress bindAddress) {
+    this.conf = conf;
+    this.nn = nn;
+    this.bindAddress = bindAddress;
+  }
+
+  private String getDefaultServerPrincipal() throws IOException {
+    return SecurityUtil.getServerPrincipal(
+        conf.get(DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY),
+        nn.getNameNodeAddress().getHostName());
+  }
+
+  public void start() throws IOException {
+    final String infoHost = bindAddress.getHostName();
+    if(UserGroupInformation.isSecurityEnabled()) {
+      String httpsUser = SecurityUtil.getServerPrincipal(conf
+          .get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY), infoHost);
+      if (httpsUser == null) {
+        LOG.warn(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY
+            + " not defined in config. Starting http server as "
+            + getDefaultServerPrincipal()
+            + ": Kerberized SSL may be not function correctly.");
+      } else {
+        // Kerberized SSL servers must be run from the host principal...
+        LOG.info("Logging in as " + httpsUser + " to start http server.");
+        SecurityUtil.login(conf, DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY,
+            DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY, infoHost);
+      }
+    }
+    UserGroupInformation ugi = UserGroupInformation.getLoginUser();
+    try {
+      this.httpServer = ugi.doAs(new PrivilegedExceptionAction<HttpServer>() {
+        @Override
+        public HttpServer run() throws IOException, InterruptedException {
+          int infoPort = bindAddress.getPort();
+          httpServer = new HttpServer("hdfs", infoHost, infoPort,
+              infoPort == 0, conf,
+              new AccessControlList(conf.get(DFSConfigKeys.DFS_ADMIN, " ")));
+          boolean certSSL = conf.getBoolean("dfs.https.enable", false);
+          boolean useKrb = UserGroupInformation.isSecurityEnabled();
+          if (certSSL || useKrb) {
+            boolean needClientAuth = conf.getBoolean(
+                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_KEY,
+                DFSConfigKeys.DFS_CLIENT_HTTPS_NEED_AUTH_DEFAULT);
+            InetSocketAddress secInfoSocAddr = NetUtils.createSocketAddr(conf
+                .get(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY,
+                    DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_DEFAULT));
+            Configuration sslConf = new HdfsConfiguration(false);
+            if (certSSL) {
+              sslConf.addResource(conf.get(
+                  "dfs.https.server.keystore.resource", "ssl-server.xml"));
+            }
+            httpServer.addSslListener(secInfoSocAddr, sslConf, needClientAuth,
+                useKrb);
+            // assume same ssl port for all datanodes
+            InetSocketAddress datanodeSslPort = NetUtils.createSocketAddr(conf
+                .get("dfs.datanode.https.address", infoHost + ":" + 50475));
+            httpServer.setAttribute("datanode.https.port", datanodeSslPort
+                .getPort());
+          }
+          httpServer.setAttribute(NAMENODE_ATTRIBUTE_KEY, nn);
+          httpServer.setAttribute(NAMENODE_ADDRESS_ATTRIBUTE_KEY,
+              nn.getNameNodeAddress());
+          httpServer.setAttribute(FSIMAGE_ATTRIBUTE_KEY, nn.getFSImage());
+          httpServer.setAttribute(JspHelper.CURRENT_CONF, conf);
+          setupServlets(httpServer);
+          httpServer.start();
+          // The web-server port can be ephemeral... ensure we have the correct
+          // info
+          infoPort = httpServer.getPort();
+          httpAddress = new InetSocketAddress(infoHost, infoPort);
+          LOG.info(nn.getRole() + " Web-server up at: " + httpAddress);
+          return httpServer;
+        }
+      });
+    } catch (InterruptedException e) {
+      throw new IOException(e);
+    } finally {
+      if(UserGroupInformation.isSecurityEnabled() &&
+          conf.get(DFSConfigKeys.DFS_NAMENODE_KRB_HTTPS_USER_NAME_KEY) != null) {
+        // Go back to being the correct Namenode principal
+        LOG.info("Logging back in as NameNode user following http server start");
+        nn.loginAsNameNodeUser(conf);
+      }
+    }
+  }
+
+  public void stop() throws Exception {
+    httpServer.stop();
+  }
+
+  public InetSocketAddress getHttpAddress() {
+    return httpAddress;
+  }
+
+  private static void setupServlets(HttpServer httpServer) {
+    httpServer.addInternalServlet("getDelegationToken",
+        GetDelegationTokenServlet.PATH_SPEC,
+        GetDelegationTokenServlet.class, true);
+    httpServer.addInternalServlet("renewDelegationToken",
+        RenewDelegationTokenServlet.PATH_SPEC,
+        RenewDelegationTokenServlet.class, true);
+    httpServer.addInternalServlet("cancelDelegationToken",
+        CancelDelegationTokenServlet.PATH_SPEC,
+        CancelDelegationTokenServlet.class, true);
+    httpServer.addInternalServlet("fsck", "/fsck", FsckServlet.class,
+        true);
+    httpServer.addInternalServlet("getimage", "/getimage",
+        GetImageServlet.class, true);
+    httpServer.addInternalServlet("listPaths", "/listPaths/*",
+        ListPathsServlet.class, false);
+    httpServer.addInternalServlet("data", "/data/*",
+        FileDataServlet.class, false);
+    httpServer.addInternalServlet("checksum", "/fileChecksum/*",
+        FileChecksumServlets.RedirectServlet.class, false);
+    httpServer.addInternalServlet("contentSummary", "/contentSummary/*",
+        ContentSummaryServlet.class, false);
+  }
+
+  public static FSImage getFsImageFromContext(ServletContext context) {
+    return (FSImage)context.getAttribute(FSIMAGE_ATTRIBUTE_KEY);
+  }
+
+  public static NameNode getNameNodeFromContext(ServletContext context) {
+    return (NameNode)context.getAttribute(NAMENODE_ATTRIBUTE_KEY);
+  }
+
+  public static Configuration getConfFromContext(ServletContext context) {
+    return (Configuration)context.getAttribute(JspHelper.CURRENT_CONF);
+  }
+
+  public static InetSocketAddress getNameNodeAddressFromContext(
+      ServletContext context) {
+    return (InetSocketAddress)context.getAttribute(
+        NAMENODE_ADDRESS_ATTRIBUTE_KEY);
+  }
+}
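
For reference, the NameNode.java hunk above reduces the server lifecycle to construct, start, and stop, plus getHttpAddress() for the bound address. A hedged sketch of that lifecycle in isolation — the standalone wrapper class and the 0.0.0.0:50070 bind address are assumptions for illustration (the default of DFS_NAMENODE_HTTP_ADDRESS_KEY); NameNode itself reads the real address from its configuration:

```java
// Illustrative sketch, not part of the commit: drives the new class the same
// way NameNode.startHttpServer() now does.
import java.net.InetSocketAddress;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;

public class HttpLifecycleSketch {
  static void run(NameNode nn) throws Exception {
    Configuration conf = new HdfsConfiguration();
    InetSocketAddress bindAddr = new InetSocketAddress("0.0.0.0", 50070);
    NameNodeHttpServer http = new NameNodeHttpServer(conf, nn, bindAddr);
    http.start();  // binds, registers servlets, handles Kerberized-SSL relogin
    // The requested port may have been ephemeral (0); ask for the bound address.
    System.out.println("Web UI at " + http.getHttpAddress());
    http.stop();
  }
}
```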


@@ -385,7 +385,7 @@ static DatanodeDescriptor getRandomDatanode(final NameNode namenode) {
   static void redirectToRandomDataNode(ServletContext context,
       HttpServletRequest request, HttpServletResponse resp) throws IOException,
       InterruptedException {
-    final NameNode nn = (NameNode) context.getAttribute("name.node");
+    final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
     final Configuration conf = (Configuration) context
         .getAttribute(JspHelper.CURRENT_CONF);
     final DatanodeID datanode = getRandomDatanode(nn);
@@ -566,12 +566,12 @@ void generateNodesList(ServletContext context, JspWriter out,
       HttpServletRequest request) throws IOException {
     ArrayList<DatanodeDescriptor> live = new ArrayList<DatanodeDescriptor>();
     ArrayList<DatanodeDescriptor> dead = new ArrayList<DatanodeDescriptor>();
-    final NameNode nn = (NameNode)context.getAttribute("name.node");
+    final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
     nn.getNamesystem().DFSNodesStatus(live, dead);
     nn.getNamesystem().removeDecomNodeFromList(live);
     nn.getNamesystem().removeDecomNodeFromList(dead);
     InetSocketAddress nnSocketAddress = (InetSocketAddress) context
-        .getAttribute(NameNode.NAMENODE_ADDRESS_ATTRIBUTE_KEY);
+        .getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY);
     String nnaddr = nnSocketAddress.getAddress().getHostAddress() + ":"
         + nnSocketAddress.getPort();


@@ -47,8 +47,7 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res
       throws ServletException, IOException {
     final UserGroupInformation ugi;
     final ServletContext context = getServletContext();
-    final Configuration conf =
-        (Configuration) context.getAttribute(JspHelper.CURRENT_CONF);
+    final Configuration conf = NameNodeHttpServer.getConfFromContext(context);
     try {
       ugi = getUGI(req, conf);
     } catch(IOException ioe) {
@@ -58,7 +57,7 @@ protected void doGet(final HttpServletRequest req, final HttpServletResponse res
           "Unable to identify or authenticate user");
       return;
     }
-    final NameNode nn = (NameNode) context.getAttribute("name.node");
+    final NameNode nn = NameNodeHttpServer.getNameNodeFromContext(context);
     String tokenString = req.getParameter(TOKEN);
     if (tokenString == null) {
       resp.sendError(HttpServletResponse.SC_MULTIPLE_CHOICES,


@@ -30,7 +30,7 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
-import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
@@ -93,7 +93,7 @@ public void testGetUgi() throws IOException {
     //Set the nnaddr url parameter to null.
     when(request.getParameter(JspHelper.NAMENODE_ADDRESS)).thenReturn(null);
     InetSocketAddress addr = new InetSocketAddress("localhost", 2222);
-    when(context.getAttribute(NameNode.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
+    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
         .thenReturn(addr);
     verifyServiceInToken(context, request, addr.getAddress().getHostAddress()
         + ":2222");
@@ -102,7 +102,7 @@ public void testGetUgi() throws IOException {
     token.setService(new Text("3.3.3.3:3333"));
     tokenString = token.encodeToUrlString();
     //Set the name.node.address attribute in Servlet context to null
-    when(context.getAttribute(NameNode.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
+    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
         .thenReturn(null);
     when(request.getParameter(JspHelper.DELEGATION_PARAMETER_NAME)).thenReturn(
         tokenString);
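
The test hunks above only swap the constant's owner; the attribute key string and the stubbing pattern are unchanged. A minimal Mockito sketch of that pattern — the wrapper class and method are hypothetical, assuming JUnit/Mockito on the classpath as in the test above:

```java
// Illustrative sketch, not part of the commit: stubbing the relocated
// attribute key and reading it back through the new typed accessor.
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.net.InetSocketAddress;
import javax.servlet.ServletContext;

import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;

public class ContextStubSketch {
  public void stubAndResolve() {
    ServletContext context = mock(ServletContext.class);
    InetSocketAddress addr = new InetSocketAddress("localhost", 2222);
    // Same key string ("name.node.address") as before; only its home moved.
    when(context.getAttribute(NameNodeHttpServer.NAMENODE_ADDRESS_ATTRIBUTE_KEY))
        .thenReturn(addr);
    // Production code resolves it via the typed accessor:
    InetSocketAddress resolved =
        NameNodeHttpServer.getNameNodeAddressFromContext(context);
    assert resolved.equals(addr);
  }
}
```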


@@ -68,7 +68,7 @@
   private static final long serialVersionUID = 1L;
 %>
 <%
-  NameNode nn = (NameNode)application.getAttribute("name.node");
+  NameNode nn = NameNodeHttpServer.getNameNodeFromContext(application);
   String namenodeRole = nn.getRole().toString();
   FSNamesystem fsn = nn.getNamesystem();


@@ -28,7 +28,7 @@
 <%!//for java.io.Serializable
   private static final long serialVersionUID = 1L;%>
 <%
-  NameNode nn = (NameNode) application.getAttribute("name.node");
+  NameNode nn = NameNodeHttpServer.getNameNodeFromContext(application);
   FSNamesystem fsn = nn.getNamesystem();
   String namenodeRole = nn.getRole().toString();
   String namenodeLabel = nn.getNameNodeAddress().getHostName() + ":"


@@ -62,7 +62,7 @@
 %>
 <%
-  NameNode nn = (NameNode)application.getAttribute("name.node");
+  NameNode nn = NameNodeHttpServer.getNameNodeFromContext(application);
   FSNamesystem fsn = nn.getNamesystem();
 
   Integer numCorruptBlocks = 10;


@@ -27,7 +27,7 @@
 %>
 <%
   final NamenodeJspHelper.HealthJsp healthjsp = new NamenodeJspHelper.HealthJsp();
-  NameNode nn = (NameNode)application.getAttribute("name.node");
+  NameNode nn = NameNodeHttpServer.getNameNodeFromContext(application);
   FSNamesystem fsn = nn.getNamesystem();
   String namenodeRole = nn.getRole().toString();
   String namenodeLabel = nn.getNameNodeAddress().getHostName() + ":" + nn.getNameNodeAddress().getPort();


@@ -27,7 +27,7 @@
 %>
 <%
   final NamenodeJspHelper.NodeListJsp nodelistjsp = new NamenodeJspHelper.NodeListJsp();
-  NameNode nn = (NameNode)application.getAttribute("name.node");
+  NameNode nn = NameNodeHttpServer.getNameNodeFromContext(application);
   String namenodeRole = nn.getRole().toString();
   FSNamesystem fsn = nn.getNamesystem();
   String namenodeLabel = nn.getNameNodeAddress().getHostName() + ":" + nn.getNameNodeAddress().getPort();