HDFS-2235. Encode servlet paths. Contributed by Eli Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1156967 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 2011-08-12 05:03:05 +00:00
parent a9a4f25762
commit 4673ab17b0
13 changed files with 255 additions and 218 deletions
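
The change replaces hand-built servlet URLs, where the HDFS file name was spliced into the URL as-is, with percent-encoded path and query components, so file names containing spaces, '#', '?', '&' and similar characters survive the trip through the Hftp client and the namenode/datanode servlets. A minimal, JDK-only sketch of the path-encoding side of the idea follows; the class and method names below are illustrative only and are not part of this patch, which routes the encoding through org.apache.hadoop.util.ServletUtil as the hunks below show.

import java.net.URI;
import java.net.URISyntaxException;

public class PathEncodingSketch {
  // Percent-encode a filesystem path for use as an HTTP URL path component,
  // keeping '/' separators intact. This only approximates what the patch's
  // ServletUtil.encodePath helper does; the real escaping rules may differ.
  static String encodePath(String path) throws URISyntaxException {
    // The multi-argument URI constructor quotes characters that are illegal
    // in a path (space, '#', '?', '"', ...) but leaves '+' alone, which is
    // what a path component needs (unlike URLEncoder, which is meant for
    // form data and would turn spaces into '+').
    return new URI(null, null, path, null).getRawPath();
  }

  public static void main(String[] args) throws URISyntaxException {
    // A legal HDFS file name that is unsafe to splice into a URL verbatim.
    String file = "/foo bar/baz+1#x";
    System.out.println("http://nn:50070/data" + encodePath(file) + "?ugi=user,group");
    // Prints: http://nn:50070/data/foo%20bar/baz+1%23x?ugi=user,group
  }
}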

View File

@@ -957,6 +957,8 @@ Trunk (unreleased changes)
     HDFS-2229. Fix a deadlock in namenode by enforcing lock acquisition
     ordering. (szetszwo)
+    HDFS-2235. Encode servlet paths. (eli)
 
 BREAKDOWN OF HDFS-1073 SUBTASKS
 
     HDFS-1521. Persist transaction ID on disk between NN restarts.

View File

@@ -61,6 +61,7 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.Progressable;
+import org.apache.hadoop.util.ServletUtil;
 import org.xml.sax.Attributes;
 import org.xml.sax.InputSource;
 import org.xml.sax.SAXException;
@@ -241,18 +242,6 @@ public class HftpFileSystem extends FileSystem {
     }
   }
 
-  /**
-   * Return a URL pointing to given path on the namenode.
-   *
-   * @param p path to obtain the URL for
-   * @return namenode URL referring to the given path
-   * @throws IOException on error constructing the URL
-   */
-  URL getNamenodeFileURL(Path p) throws IOException {
-    return getNamenodeURL("/data" + p.toUri().getPath(),
-        "ugi=" + getUgiParameter());
-  }
-
   /**
    * Return a URL pointing to given path on the namenode.
    *
@@ -262,28 +251,25 @@ public class HftpFileSystem extends FileSystem {
    * @throws IOException on error constructing the URL
    */
   URL getNamenodeURL(String path, String query) throws IOException {
-    try {
-      final URL url = new URI("http", null, nnAddr.getHostName(),
-          nnAddr.getPort(), path, query, null).toURL();
-      if (LOG.isTraceEnabled()) {
-        LOG.trace("url=" + url);
-      }
-      return url;
-    } catch (URISyntaxException e) {
-      throw new IOException(e);
+    final URL url = new URL("http", nnAddr.getHostName(),
+        nnAddr.getPort(), path + '?' + query);
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("url=" + url);
     }
+    return url;
   }
 
   /**
-   * ugi parameter for http connection
+   * Get encoded UGI parameter string for a URL.
    *
    * @return user_shortname,group1,group2...
    */
-  private String getUgiParameter() {
-    StringBuilder ugiParamenter = new StringBuilder(ugi.getShortUserName());
+  private String getEncodedUgiParameter() {
+    StringBuilder ugiParamenter = new StringBuilder(
+        ServletUtil.encodeQueryValue(ugi.getShortUserName()));
     for(String g: ugi.getGroupNames()) {
       ugiParamenter.append(",");
-      ugiParamenter.append(g);
+      ugiParamenter.append(ServletUtil.encodeQueryValue(g));
     }
     return ugiParamenter.toString();
   }
@@ -304,7 +290,7 @@ public class HftpFileSystem extends FileSystem {
    */
  protected HttpURLConnection openConnection(String path, String query)
      throws IOException {
-    query = updateQuery(query);
+    query = addDelegationTokenParam(query);
    final URL url = getNamenodeURL(path, query);
    final HttpURLConnection connection = (HttpURLConnection)url.openConnection();
    try {
@@ -316,14 +302,14 @@ public class HftpFileSystem extends FileSystem {
    return connection;
  }
 
-  protected String updateQuery(String query) throws IOException {
+  protected String addDelegationTokenParam(String query) throws IOException {
    String tokenString = null;
    if (UserGroupInformation.isSecurityEnabled()) {
      synchronized (this) {
        if (delegationToken != null) {
          tokenString = delegationToken.encodeToUrlString();
          return (query + JspHelper.getDelegationTokenUrlParam(tokenString));
-        } // else we are talking to an insecure cluster
+        }
      }
    }
    return query;
@@ -331,9 +317,9 @@ public class HftpFileSystem extends FileSystem {
  @Override
  public FSDataInputStream open(Path f, int buffersize) throws IOException {
-    String query = "ugi=" + getUgiParameter();
-    query = updateQuery(query);
-    URL u = getNamenodeURL("/data" + f.toUri().getPath(), query);
+    String path = "/data" + ServletUtil.encodePath(f.toUri().getPath());
+    String query = addDelegationTokenParam("ugi=" + getEncodedUgiParameter());
+    URL u = getNamenodeURL(path, query);
    return new FSDataInputStream(new ByteRangeInputStream(u));
  }
@@ -382,9 +368,9 @@ public class HftpFileSystem extends FileSystem {
    try {
      XMLReader xr = XMLReaderFactory.createXMLReader();
      xr.setContentHandler(this);
-      HttpURLConnection connection = openConnection("/listPaths" + path,
-          "ugi=" + getUgiParameter() + (recur? "&recursive=yes" : ""));
+      HttpURLConnection connection = openConnection(
+          "/listPaths" + ServletUtil.encodePath(path),
+          "ugi=" + getEncodedUgiParameter() + (recur ? "&recursive=yes" : ""));
      InputStream resp = connection.getInputStream();
      xr.parse(new InputSource(resp));
    } catch(SAXException e) {
@@ -447,7 +433,8 @@ public class HftpFileSystem extends FileSystem {
  private FileChecksum getFileChecksum(String f) throws IOException {
    final HttpURLConnection connection = openConnection(
-        "/fileChecksum" + f, "ugi=" + getUgiParameter());
+        "/fileChecksum" + ServletUtil.encodePath(f),
+        "ugi=" + getEncodedUgiParameter());
    try {
      final XMLReader xr = XMLReaderFactory.createXMLReader();
      xr.setContentHandler(this);
@@ -534,7 +521,8 @@ public class HftpFileSystem extends FileSystem {
    */
  private ContentSummary getContentSummary(String path) throws IOException {
    final HttpURLConnection connection = openConnection(
-        "/contentSummary" + path, "ugi=" + getUgiParameter());
+        "/contentSummary" + ServletUtil.encodePath(path),
+        "ugi=" + getEncodedUgiParameter());
    InputStream in = null;
    try {
      in = connection.getInputStream();
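
The query string gets the same treatment as the path: the ugi parameter is now assembled from individually encoded components. Below is a rough stand-alone sketch of that assembly, using java.net.URLEncoder as a stand-in for the patch's ServletUtil.encodeQueryValue (an assumption; the exact escaping rules differ, for example URLEncoder writes '+' for a space where a percent-encoder would write %20).

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class UgiParamSketch {
  // Encode the short user name and each group name separately so that a
  // group containing '&' or '=' cannot break the query string; the commas
  // that delimit the entries stay literal.
  static String encodedUgiParam(String user, String... groups)
      throws UnsupportedEncodingException {
    StringBuilder ugi = new StringBuilder(URLEncoder.encode(user, "UTF-8"));
    for (String g : groups) {
      ugi.append(',').append(URLEncoder.encode(g, "UTF-8"));
    }
    return ugi.toString();
  }

  public static void main(String[] args) throws Exception {
    System.out.println("ugi=" + encodedUgiParam("alice", "dev&ops", "a=b"));
    // Prints: ugi=alice,dev%26ops,a%3Db
  }
}

Encoding each value on its own keeps the list structure of the parameter intact while neutralizing query metacharacters inside the individual names.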

View File

@@ -123,42 +123,42 @@ public class HsftpFileSystem extends HftpFileSystem {
   @Override
   protected HttpURLConnection openConnection(String path, String query)
       throws IOException {
+    query = addDelegationTokenParam(query);
+    final URL url = new URL("https", nnAddr.getHostName(),
+        nnAddr.getPort(), path + '?' + query);
+    HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();
+    // bypass hostname verification
     try {
-      query = updateQuery(query);
-      final URL url = new URI("https", null, nnAddr.getHostName(), nnAddr
-          .getPort(), path, query, null).toURL();
-      HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
-      // bypass hostname verification
       conn.setHostnameVerifier(new DummyHostnameVerifier());
       conn.setRequestMethod("GET");
       conn.connect();
+    } catch (IOException ioe) {
+      throwIOExceptionFromConnection(conn, ioe);
+    }
 
     // check cert expiration date
     final int warnDays = ExpWarnDays;
     if (warnDays > 0) { // make sure only check once
       ExpWarnDays = 0;
       long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY
           + System.currentTimeMillis();
       X509Certificate[] clientCerts = (X509Certificate[]) conn
           .getLocalCertificates();
       if (clientCerts != null) {
         for (X509Certificate cert : clientCerts) {
           long expTime = cert.getNotAfter().getTime();
           if (expTime < expTimeThreshold) {
             StringBuilder sb = new StringBuilder();
             sb.append("\n Client certificate "
                 + cert.getSubjectX500Principal().getName());
             int dayOffSet = (int) ((expTime - System.currentTimeMillis()) / MM_SECONDS_PER_DAY);
             sb.append(" have " + dayOffSet + " days to expire");
             LOG.warn(sb.toString());
           }
         }
       }
     }
-      return (HttpURLConnection) conn;
-    } catch (URISyntaxException e) {
-      throw (IOException) new IOException().initCause(e);
-    }
+    return (HttpURLConnection) conn;
   }
 
   @Override
@Override @Override

View File

@@ -47,8 +47,8 @@ import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.util.ServletUtil;
 import org.apache.hadoop.util.StringUtils;
-import org.mortbay.util.URIUtil;
 
 @InterfaceAudience.Private
 public class DatanodeJspHelper {
@@ -289,7 +289,7 @@ public class DatanodeJspHelper {
     // Add the various links for looking at the file contents
     // URL for downloading the full file
     String downloadUrl = "http://" + req.getServerName() + ":"
-        + req.getServerPort() + "/streamFile" + URIUtil.encodePath(filename)
+        + req.getServerPort() + "/streamFile" + ServletUtil.encodePath(filename)
         + JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr, true)
         + JspHelper.getDelegationTokenUrlParam(tokenString);
     out.print("<a name=\"viewOptions\"></a>");

View File

@@ -31,6 +31,7 @@ import org.apache.hadoop.fs.ContentSummary;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ServletUtil;
 import org.znerd.xmlenc.XMLOutputter;
 
 /** Servlets for file checksum */
@@ -49,8 +50,7 @@ public class ContentSummaryServlet extends DfsServlet {
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
       @Override
       public Void run() throws Exception {
-        final String path = request.getPathInfo();
+        final String path = ServletUtil.getDecodedPath(request, "/contentSummary");
         final PrintWriter out = response.getWriter();
         final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
         xml.declaration();

View File

@@ -19,8 +19,6 @@ package org.apache.hadoop.hdfs.server.namenode;
 import java.io.IOException;
 import java.net.InetSocketAddress;
-import java.net.URI;
-import java.net.URISyntaxException;
 
 import javax.servlet.ServletContext;
 import javax.servlet.http.HttpServlet;
@@ -33,8 +31,6 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
-import org.apache.hadoop.hdfs.protocol.DatanodeID;
-import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.ipc.RemoteException;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -86,48 +82,6 @@ abstract class DfsServlet extends HttpServlet {
     return DFSUtil.createNamenode(nnAddr, conf);
   }
 
-  /** Create a URI for redirecting request to a datanode */
-  protected URI createRedirectUri(String servletpath,
-                                  UserGroupInformation ugi,
-                                  DatanodeID host,
-                                  HttpServletRequest request,
-                                  NameNode nn
-                                  ) throws IOException, URISyntaxException {
-    final String hostname = host instanceof DatanodeInfo?
-        ((DatanodeInfo)host).getHostName(): host.getHost();
-    final String scheme = request.getScheme();
-    final int port = "https".equals(scheme)?
-        (Integer)getServletContext().getAttribute("datanode.https.port")
-        : host.getInfoPort();
-    final String filename = request.getPathInfo();
-    StringBuilder params = new StringBuilder();
-    params.append("filename=");
-    params.append(filename);
-    if (UserGroupInformation.isSecurityEnabled()) {
-      String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
-      params.append(JspHelper.getDelegationTokenUrlParam(tokenString));
-    } else {
-      params.append("&ugi=");
-      params.append(ugi.getShortUserName());
-    }
-
-    // Add namenode address to the URL params
-    String nnAddr = NameNode.getHostPortString(nn.getNameNodeAddress());
-    params.append(JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr));
-    return new URI(scheme, null, hostname, port, servletpath,
-                   params.toString(), null);
-  }
-
-  /** Get filename from the request */
-  protected String getFilename(HttpServletRequest request,
-      HttpServletResponse response) throws IOException {
-    final String filename = request.getParameter("filename");
-    if (filename == null || filename.length() == 0) {
-      throw new IOException("Invalid filename");
-    }
-    return filename;
-  }
-
   protected UserGroupInformation getUGI(HttpServletRequest request,
       Configuration conf) throws IOException {
     return JspHelper.getUGI(getServletContext(), request, conf);

View File

@@ -21,6 +21,7 @@ import java.io.IOException;
 import java.io.PrintWriter;
 import java.net.URI;
 import java.net.URISyntaxException;
+import java.net.URL;
 
 import javax.net.SocketFactory;
 import javax.servlet.ServletContext;
@@ -36,11 +37,14 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
+import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.server.common.HdfsConstants;
+import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ServletUtil;
 import org.znerd.xmlenc.XMLOutputter;
 
 /** Servlets for file checksum */
@@ -52,6 +56,32 @@ public class FileChecksumServlets {
     /** For java.io.Serializable */
     private static final long serialVersionUID = 1L;
 
+    /** Create a redirection URL */
+    private URL createRedirectURL(UserGroupInformation ugi, DatanodeID host,
+        HttpServletRequest request, NameNode nn)
+        throws IOException, URISyntaxException {
+      final String hostname = host instanceof DatanodeInfo
+          ? ((DatanodeInfo)host).getHostName() : host.getHost();
+      final String scheme = request.getScheme();
+      final int port = "https".equals(scheme)
+          ? (Integer)getServletContext().getAttribute("datanode.https.port")
+          : host.getInfoPort();
+      final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum");
+
+      String dtParam = "";
+      if (UserGroupInformation.isSecurityEnabled()) {
+        String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
+        dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
+      }
+      String addr = NameNode.getHostPortString(nn.getNameNodeAddress());
+      String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
+
+      return new URL(scheme, hostname, port,
+          "/getFileChecksum" + encodedPath + '?' +
+          "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) +
+          dtParam + addrParam);
+    }
+
     /** {@inheritDoc} */
     public void doGet(HttpServletRequest request, HttpServletResponse response
         ) throws ServletException, IOException {
@@ -62,12 +92,10 @@ public class FileChecksumServlets {
           context);
       final DatanodeID datanode = NamenodeJspHelper.getRandomDatanode(namenode);
       try {
-        final URI uri = createRedirectUri("/getFileChecksum", ugi, datanode,
-            request, namenode);
-        response.sendRedirect(uri.toURL().toString());
+        response.sendRedirect(
+            createRedirectURL(ugi, datanode, request, namenode).toString());
       } catch(URISyntaxException e) {
         throw new ServletException(e);
-        //response.getWriter().println(e.toString());
       } catch (IOException e) {
         response.sendError(400, e.getMessage());
       }
@@ -84,7 +112,7 @@ public class FileChecksumServlets {
     public void doGet(HttpServletRequest request, HttpServletResponse response
         ) throws ServletException, IOException {
       final PrintWriter out = response.getWriter();
-      final String filename = getFilename(request, response);
+      final String path = ServletUtil.getDecodedPath(request, "/getFileChecksum");
       final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
       xml.declaration();
@@ -103,12 +131,12 @@ public class FileChecksumServlets {
             datanode, conf, getUGI(request, conf));
         final ClientProtocol nnproxy = dfs.getNamenode();
         final MD5MD5CRC32FileChecksum checksum = DFSClient.getFileChecksum(
-            filename, nnproxy, socketFactory, socketTimeout);
+            path, nnproxy, socketFactory, socketTimeout);
         MD5MD5CRC32FileChecksum.write(xml, checksum);
       } catch(IOException ioe) {
-        writeXml(ioe, filename, xml);
+        writeXml(ioe, path, xml);
       } catch (InterruptedException e) {
-        writeXml(e, filename, xml);
+        writeXml(e, path, xml);
       }
       xml.endDocument();
     }

View File

@@ -18,8 +18,8 @@
 package org.apache.hadoop.hdfs.server.namenode;
 
 import java.io.IOException;
-import java.net.URI;
 import java.net.URISyntaxException;
+import java.net.URL;
 import java.security.PrivilegedExceptionAction;
 
 import javax.servlet.http.HttpServletRequest;
@@ -35,6 +35,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ServletUtil;
 
 /** Redirect queries about the hosted filesystem to an appropriate datanode.
  * @see org.apache.hadoop.hdfs.HftpFileSystem
@@ -44,22 +45,25 @@ public class FileDataServlet extends DfsServlet {
   /** For java.io.Serializable */
   private static final long serialVersionUID = 1L;
 
-  /** Create a redirection URI */
-  protected URI createUri(String parent, HdfsFileStatus i, UserGroupInformation ugi,
-      ClientProtocol nnproxy, HttpServletRequest request, String dt)
+  /** Create a redirection URL */
+  private URL createRedirectURL(String path, String encodedPath, HdfsFileStatus status,
+      UserGroupInformation ugi, ClientProtocol nnproxy, HttpServletRequest request, String dt)
       throws IOException, URISyntaxException {
     String scheme = request.getScheme();
     final LocatedBlocks blks = nnproxy.getBlockLocations(
-        i.getFullPath(new Path(parent)).toUri().getPath(), 0, 1);
-    final DatanodeID host = pickSrcDatanode(blks, i);
+        status.getFullPath(new Path(path)).toUri().getPath(), 0, 1);
+    final DatanodeID host = pickSrcDatanode(blks, status);
     final String hostname;
     if (host instanceof DatanodeInfo) {
       hostname = ((DatanodeInfo)host).getHostName();
     } else {
       hostname = host.getHost();
     }
+    final int port = "https".equals(scheme)
+        ? (Integer)getServletContext().getAttribute("datanode.https.port")
+        : host.getInfoPort();
 
-    String dtParam="";
+    String dtParam = "";
     if (dt != null) {
       dtParam=JspHelper.getDelegationTokenUrlParam(dt);
     }
@@ -70,12 +74,10 @@ public class FileDataServlet extends DfsServlet {
     String addr = NameNode.getHostPortString(nn.getNameNodeAddress());
     String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
 
-    return new URI(scheme, null, hostname,
-        "https".equals(scheme)
-          ? (Integer)getServletContext().getAttribute("datanode.https.port")
-          : host.getInfoPort(),
-        "/streamFile" + i.getFullName(parent),
-        "ugi=" + ugi.getShortUserName() + dtParam + addrParam, null);
+    return new URL(scheme, hostname, port,
+        "/streamFile" + encodedPath + '?' +
+        "ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) +
+        dtParam + addrParam);
   }
 
   /** Select a datanode to service this request.
@@ -112,17 +114,16 @@ public class FileDataServlet extends DfsServlet {
       @Override
       public Void run() throws IOException {
         ClientProtocol nn = createNameNodeProxy();
-        final String path = request.getPathInfo() != null ? request
-            .getPathInfo() : "/";
+        final String path = ServletUtil.getDecodedPath(request, "/data");
+        final String encodedPath = ServletUtil.getRawPath(request, "/data");
         String delegationToken = request
             .getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
 
         HdfsFileStatus info = nn.getFileInfo(path);
         if (info != null && !info.isDir()) {
           try {
-            response.sendRedirect(createUri(path, info, ugi, nn, request,
-                delegationToken).toURL().toString());
+            response.sendRedirect(createRedirectURL(path, encodedPath,
+                info, ugi, nn, request, delegationToken).toString());
           } catch (URISyntaxException e) {
             response.getWriter().println(e.toString());
           }

View File

@@ -25,6 +25,7 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
+import org.apache.hadoop.util.ServletUtil;
 import org.apache.hadoop.util.VersionInfo;
 
 import org.znerd.xmlenc.*;
@@ -86,8 +87,7 @@ public class ListPathsServlet extends DfsServlet {
    */
   protected Map<String,String> buildRoot(HttpServletRequest request,
       XMLOutputter doc) {
-    final String path = request.getPathInfo() != null
-      ? request.getPathInfo() : "/";
+    final String path = ServletUtil.getDecodedPath(request, "/listPaths");
     final String exclude = request.getParameter("exclude") != null
       ? request.getParameter("exclude") : "";
     final String filter = request.getParameter("filter") != null
@@ -135,6 +135,7 @@ public class ListPathsServlet extends DfsServlet {
     final Map<String, String> root = buildRoot(request, doc);
     final String path = root.get("path");
+    final String filePath = ServletUtil.getDecodedPath(request, "/listPaths");
 
     try {
       final boolean recur = "yes".equals(root.get("recursive"));
@@ -153,7 +154,7 @@ public class ListPathsServlet extends DfsServlet {
         doc.attribute(m.getKey(), m.getValue());
       }
 
-      HdfsFileStatus base = nn.getFileInfo(path);
+      HdfsFileStatus base = nn.getFileInfo(filePath);
       if ((base != null) && base.isDir()) {
         writeInfo(base.getFullPath(new Path(path)), base, doc);
       }

View File

@@ -38,6 +38,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.ServletUtil;
 import org.mortbay.jetty.InclusiveByteRange;
 
 @InterfaceAudience.Private
@@ -57,13 +58,14 @@ public class StreamFile extends DfsServlet {
     final DataNode datanode = (DataNode) context.getAttribute("datanode");
     return DatanodeJspHelper.getDFSClient(request, datanode, conf, ugi);
   }
 
   @SuppressWarnings("unchecked")
   public void doGet(HttpServletRequest request, HttpServletResponse response)
       throws ServletException, IOException {
-    final String path = request.getPathInfo() != null ?
-        request.getPathInfo() : "/";
+    final String path = ServletUtil.getDecodedPath(request, "/streamFile");
+    final String rawPath = ServletUtil.getRawPath(request, "/streamFile");
     final String filename = JspHelper.validatePath(path);
+    final String rawFilename = JspHelper.validatePath(rawPath);
     if (filename == null) {
       response.setContentType("text/plain");
       PrintWriter out = response.getWriter();
@@ -98,7 +100,7 @@ public class StreamFile extends DfsServlet {
     } else {
       // No ranges, so send entire file
       response.setHeader("Content-Disposition", "attachment; filename=\"" +
-          filename + "\"");
+          rawFilename + "\"");
       response.setContentType("application/octet-stream");
       response.setHeader(CONTENT_LENGTH, "" + fileLen);
       StreamFile.copyFromOffset(in, out, 0L, fileLen);
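
On the server side the patch keeps two views of the requested path: a decoded path handed to the filesystem calls, and a raw, still percent-encoded path used wherever the name is spliced back into a redirect URL or a Content-Disposition header, so it is neither decoded twice nor double-encoded. A rough sketch of that distinction follows, assuming a servlet mounted at /streamFile/* under the root context; the helper names mirror the diff, but the bodies here are guesses and not the actual ServletUtil implementation.

import java.net.URI;
import java.net.URISyntaxException;
import javax.servlet.http.HttpServletRequest;

public class ServletPathSketch {
  // Raw path: still percent-encoded, taken straight from the request URI.
  // Safe to re-embed in redirect URLs or a Content-Disposition header as-is.
  static String getRawPath(HttpServletRequest request, String servletName) {
    return request.getRequestURI().substring(servletName.length());
  }

  // Decoded path: what the filesystem layer should see, e.g. "/foo bar".
  // URI#getPath() undoes percent-escapes but leaves '+' alone, matching
  // path (not form) semantics.
  static String getDecodedPath(HttpServletRequest request, String servletName)
      throws URISyntaxException {
    return new URI(getRawPath(request, servletName)).getPath();
  }
}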

View File

@@ -19,14 +19,15 @@
 package org.apache.hadoop.hdfs;
 
 import java.io.IOException;
+import java.net.URISyntaxException;
 import java.net.URL;
 import java.net.HttpURLConnection;
 import java.util.Random;
 
-import junit.extensions.TestSetup;
-import junit.framework.Test;
-import junit.framework.TestCase;
-import junit.framework.TestSuite;
+import org.junit.Test;
+import org.junit.BeforeClass;
+import org.junit.AfterClass;
+import static org.junit.Assert.*;
 
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
@@ -39,26 +40,48 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
+import org.apache.hadoop.util.ServletUtil;
 import org.apache.log4j.Level;
 
-/**
- * Unittest for HftpFileSystem.
- *
- */
-public class TestHftpFileSystem extends TestCase {
+public class TestHftpFileSystem {
   private static final Random RAN = new Random();
-  private static final Path TEST_FILE = new Path("/testfile+1");
 
   private static Configuration config = null;
   private static MiniDFSCluster cluster = null;
   private static FileSystem hdfs = null;
   private static HftpFileSystem hftpFs = null;
   private static String blockPoolId = null;
 
-  /**
-   * Setup hadoop mini-cluster for test.
-   */
-  private static void oneTimeSetUp() throws IOException {
+  private static Path[] TEST_PATHS = new Path[] {
+      // URI does not encode, Request#getPathInfo returns /foo
+      new Path("/foo;bar"),
+
+      // URI does not encode, Request#getPathInfo returns verbatim
+      new Path("/foo+"),
+      new Path("/foo+bar/foo+bar"),
+      new Path("/foo=bar/foo=bar"),
+      new Path("/foo,bar/foo,bar"),
+      new Path("/foo@bar/foo@bar"),
+      new Path("/foo&bar/foo&bar"),
+      new Path("/foo$bar/foo$bar"),
+      new Path("/foo_bar/foo_bar"),
+      new Path("/foo~bar/foo~bar"),
+      new Path("/foo.bar/foo.bar"),
+      new Path("/foo../bar/foo../bar"),
+      new Path("/foo.../bar/foo.../bar"),
+      new Path("/foo'bar/foo'bar"),
+      new Path("/foo#bar/foo#bar"),
+      new Path("/foo!bar/foo!bar"),
+      // HDFS file names may not contain ":"
+
+      // URI percent encodes, Request#getPathInfo decodes
+      new Path("/foo bar/foo bar"),
+      new Path("/foo?bar/foo?bar"),
+      new Path("/foo\">bar/foo\">bar"),
+  };
+
+  @BeforeClass
+  public static void setUp() throws IOException {
     ((Log4JLogger)HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);
 
     final long seed = RAN.nextLong();
@@ -67,66 +90,73 @@ public class TestHftpFileSystem extends TestCase {
     config = new Configuration();
     config.set(DFSConfigKeys.DFS_DATANODE_HOST_NAME_KEY, "localhost");
 
     cluster = new MiniDFSCluster.Builder(config).numDataNodes(2).build();
     hdfs = cluster.getFileSystem();
     blockPoolId = cluster.getNamesystem().getBlockPoolId();
-    final String hftpuri =
+    final String hftpUri =
         "hftp://" + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
-    hftpFs = (HftpFileSystem) new Path(hftpuri).getFileSystem(config);
+    hftpFs = (HftpFileSystem) new Path(hftpUri).getFileSystem(config);
   }
 
-  /**
-   * Shutdown the hadoop mini-cluster.
-   */
-  private static void oneTimeTearDown() throws IOException {
+  @AfterClass
+  public static void tearDown() throws IOException {
     hdfs.close();
     hftpFs.close();
     cluster.shutdown();
   }
 
-  public TestHftpFileSystem(String name) {
-    super(name);
-  }
-
   /**
-   * For one time setup / teardown.
+   * Test file creation and access with file names that need encoding.
    */
-  public static Test suite() {
-    TestSuite suite = new TestSuite();
-
-    suite.addTestSuite(TestHftpFileSystem.class);
-
-    return new TestSetup(suite) {
-      @Override
-      protected void setUp() throws IOException {
-        oneTimeSetUp();
-      }
-
-      @Override
-      protected void tearDown() throws IOException {
-        oneTimeTearDown();
-      }
-    };
-  }
-
-  public void testDataNodeRedirect() throws Exception {
-    if (hdfs.exists(TEST_FILE)) {
-      hdfs.delete(TEST_FILE, true);
-    }
-    FSDataOutputStream out = hdfs.create(TEST_FILE, (short) 1);
+  @Test
+  public void testFileNameEncoding() throws IOException, URISyntaxException {
+    for (Path p : TEST_PATHS) {
+      // Create and access the path (data and streamFile servlets)
+      FSDataOutputStream out = hdfs.create(p, true);
+      out.writeBytes("0123456789");
+      out.close();
+      FSDataInputStream in = hftpFs.open(p);
+      assertEquals('0', in.read());
+
+      // Check the file status matches the path. Hftp returns a FileStatus
+      // with the entire URI, extract the path part.
+      assertEquals(p, new Path(hftpFs.getFileStatus(p).getPath().toUri().getPath()));
+
+      // Test list status (listPath servlet)
+      assertEquals(1, hftpFs.listStatus(p).length);
+
+      // Test content summary (contentSummary servlet)
+      assertNotNull("No content summary", hftpFs.getContentSummary(p));
+
+      // Test checksums (fileChecksum and getFileChecksum servlets)
+      assertNotNull("No file checksum", hftpFs.getFileChecksum(p));
+    }
+  }
+
+  private void testDataNodeRedirect(Path path) throws IOException {
+    // Create the file
+    if (hdfs.exists(path)) {
+      hdfs.delete(path, true);
+    }
+    FSDataOutputStream out = hdfs.create(path, (short)1);
     out.writeBytes("0123456789");
     out.close();
 
+    // Get the path's block location so we can determine
+    // if we were redirected to the right DN.
     BlockLocation[] locations =
-        hdfs.getFileBlockLocations(TEST_FILE, 0, 10);
+        hdfs.getFileBlockLocations(path, 0, 10);
     String locationName = locations[0].getNames()[0];
-    URL u = hftpFs.getNamenodeFileURL(TEST_FILE);
+
+    // Connect to the NN to get redirected
+    URL u = hftpFs.getNamenodeURL(
+        "/data" + ServletUtil.encodePath(path.toUri().getPath()),
+        "ugi=userx,groupy");
     HttpURLConnection conn = (HttpURLConnection)u.openConnection();
     HttpURLConnection.setFollowRedirects(true);
     conn.connect();
     conn.getInputStream();
     boolean checked = false;
     // Find the datanode that has the block according to locations
     // and check that the URL was redirected to this DN's info port
@@ -138,19 +168,32 @@ public class TestHftpFileSystem extends TestCase {
         assertEquals(dnR.getInfoPort(), conn.getURL().getPort());
       }
     }
     assertTrue("The test never checked that location of " +
               "the block and hftp desitnation are the same", checked);
   }
 
+  /**
+   * Test that clients are redirected to the appropriate DN.
+   */
+  @Test
+  public void testDataNodeRedirect() throws IOException {
+    for (Path p : TEST_PATHS) {
+      testDataNodeRedirect(p);
+    }
+  }
+
   /**
    * Tests getPos() functionality.
    */
-  public void testGetPos() throws Exception {
+  @Test
+  public void testGetPos() throws IOException {
+    final Path testFile = new Path("/testfile+1");
     // Write a test file.
-    FSDataOutputStream out = hdfs.create(TEST_FILE, true);
+    FSDataOutputStream out = hdfs.create(testFile, true);
     out.writeBytes("0123456789");
     out.close();
 
-    FSDataInputStream in = hftpFs.open(TEST_FILE);
+    FSDataInputStream in = hftpFs.open(testFile);
 
     // Test read().
     for (int i = 0; i < 5; ++i) {
@@ -175,17 +218,17 @@ public class TestHftpFileSystem extends TestCase {
     assertEquals(10, in.getPos());
     in.close();
   }
 
   /**
    * Tests seek().
    */
-  public void testSeek() throws Exception {
-    // Write a test file.
-    FSDataOutputStream out = hdfs.create(TEST_FILE, true);
+  @Test
+  public void testSeek() throws IOException {
+    final Path testFile = new Path("/testfile+1");
+    FSDataOutputStream out = hdfs.create(testFile, true);
     out.writeBytes("0123456789");
     out.close();
-
-    FSDataInputStream in = hftpFs.open(TEST_FILE);
+    FSDataInputStream in = hftpFs.open(testFile);
     in.seek(7);
     assertEquals('7', in.read());
   }

View File

@@ -28,7 +28,6 @@ import java.net.URLEncoder;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.jsp.JspWriter;
 
-import org.apache.commons.httpclient.util.URIUtil;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSTestUtil;
@@ -36,6 +35,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.util.ServletUtil;
 import org.junit.Test;
 import org.mockito.Mockito;
@@ -71,7 +71,7 @@ public class TestDatanodeJsp {
     if (!doTail) {
       assertTrue("page should show link to download file", viewFilePage
-          .contains("/streamFile" + URIUtil.encodePath(testPath.toString()) +
+          .contains("/streamFile" + ServletUtil.encodePath(testPath.toString()) +
               "?nnaddr=localhost:" + nnIpcAddress.getPort()));
     }
   }
@@ -90,7 +90,23 @@ public class TestDatanodeJsp {
       testViewingFile(cluster, "/test-file", true);
       testViewingFile(cluster, "/tmp/test-file", true);
       testViewingFile(cluster, "/tmp/test-file%with goofy&characters", true);
+      testViewingFile(cluster, "/foo bar", true);
+      testViewingFile(cluster, "/foo+bar", true);
+      testViewingFile(cluster, "/foo;bar", true);
+      testViewingFile(cluster, "/foo=bar", true);
+      testViewingFile(cluster, "/foo,bar", true);
+      testViewingFile(cluster, "/foo?bar", true);
+      testViewingFile(cluster, "/foo\">bar", true);
+
+      testViewingFile(cluster, "/foo bar", false);
+      // See HDFS-2233
+      //testViewingFile(cluster, "/foo+bar", false);
+      //testViewingFile(cluster, "/foo;bar", false);
+      testViewingFile(cluster, "/foo=bar", false);
+      testViewingFile(cluster, "/foo,bar", false);
+      testViewingFile(cluster, "/foo?bar", false);
+      testViewingFile(cluster, "/foo\">bar", false);
     } finally {
       if (cluster != null) {
         cluster.shutdown();

View File

@@ -48,8 +48,8 @@ import org.mockito.Mockito;
 import org.mortbay.jetty.InclusiveByteRange;
 
 /*
-  Mock input stream class that always outputs the current position of the stream
+ * Mock input stream class that always outputs the current position of the stream.
  */
 class MockFSInputStream extends FSInputStream {
   long currentPos = 0;
   public int read() throws IOException {
@@ -198,7 +198,7 @@ public class TestStreamFile {
   }
 
   // Test for positive scenario
   @Test
   public void testDoGetShouldWriteTheFileContentIntoServletOutputStream()
       throws Exception {
@@ -264,9 +264,11 @@ public class TestStreamFile {
     Mockito.doReturn(CONF).when(mockServletContext).getAttribute(
         JspHelper.CURRENT_CONF);
     Mockito.doReturn(NameNode.getHostPortString(NameNode.getAddress(CONF)))
         .when(mockHttpServletRequest).getParameter("nnaddr");
     Mockito.doReturn(testFile.toString()).when(mockHttpServletRequest)
         .getPathInfo();
+    Mockito.doReturn("/streamFile"+testFile.toString()).when(mockHttpServletRequest)
+        .getRequestURI();
   }
 
   static Path writeFile(FileSystem fs, Path f) throws IOException {