HDFS-2235. Encode servlet paths. Contributed by Eli Collins

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1156967 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 2011-08-12 05:03:05 +00:00
parent a9a4f25762
commit 4673ab17b0
13 changed files with 255 additions and 218 deletions

View File

@ -957,6 +957,8 @@ Trunk (unreleased changes)
HDFS-2229. Fix a deadlock in namenode by enforcing lock acquisition
ordering. (szetszwo)
HDFS-2235. Encode servlet paths. (eli)
BREAKDOWN OF HDFS-1073 SUBTASKS
HDFS-1521. Persist transaction ID on disk between NN restarts.
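What the one-line changelog entry means: the HDFS servlets (data, listPaths, fileChecksum, contentSummary, streamFile) carry the file path inside the request URL, and HDFS file names may contain characters that are illegal or ambiguous there (spaces, '%', '"', ';', and so on). This patch makes the client side percent-encode the path before building the URL and makes the servlets decode it on arrival. Below is a minimal sketch of that round trip using only java.net; the real helpers are ServletUtil.encodePath, encodeQueryValue, getDecodedPath, and getRawPath in hadoop-common, and their exact behavior may differ:

import java.net.URI;
import java.net.URISyntaxException;

public class PathCodecSketch {
  // Percent-encode a filesystem path for use as a URL path component.
  // The multi-argument URI constructor quotes illegal characters
  // (' ', '%', '?', '"', ...) but leaves legal ones such as '+' alone.
  static String encodePath(String path) throws URISyntaxException {
    return new URI(null, null, path, null).getRawPath();
  }

  // Decode a raw path. URLDecoder.decode() would be wrong here: it maps
  // '+' to ' ', corrupting names like "/foo+bar". URI.getPath() resolves
  // percent-escapes only.
  static String decodePath(String rawPath) throws URISyntaxException {
    return new URI(rawPath).getPath();
  }

  public static void main(String[] args) throws URISyntaxException {
    String p = "/foo bar/foo+bar";
    String enc = encodePath(p);                    // /foo%20bar/foo+bar
    System.out.println(enc);
    System.out.println(decodePath(enc).equals(p)); // true: round-trips
  }
}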

View File

@ -61,6 +61,7 @@ import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ServletUtil;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
@ -241,18 +242,6 @@ public class HftpFileSystem extends FileSystem {
}
}
/**
* Return a URL pointing to given path on the namenode.
*
* @param p path to obtain the URL for
* @return namenode URL referring to the given path
* @throws IOException on error constructing the URL
*/
URL getNamenodeFileURL(Path p) throws IOException {
return getNamenodeURL("/data" + p.toUri().getPath(),
"ugi=" + getUgiParameter());
}
/**
* Return a URL pointing to given path on the namenode.
*
@ -262,28 +251,25 @@ public class HftpFileSystem extends FileSystem {
* @throws IOException on error constructing the URL
*/
URL getNamenodeURL(String path, String query) throws IOException {
try {
final URL url = new URI("http", null, nnAddr.getHostName(),
nnAddr.getPort(), path, query, null).toURL();
if (LOG.isTraceEnabled()) {
LOG.trace("url=" + url);
}
return url;
} catch (URISyntaxException e) {
throw new IOException(e);
}
final URL url = new URL("http", nnAddr.getHostName(),
nnAddr.getPort(), path + '?' + query);
if (LOG.isTraceEnabled()) {
LOG.trace("url=" + url);
}
return url;
}
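Why this method switched from the multi-argument URI constructor to URL: those URI constructors do their own quoting, and they always quote '%'. Now that callers hand in pre-encoded paths, going through URI would double-encode them, while URL accepts the file part verbatim. This is also why the URISyntaxException plumbing above disappears: nothing is parsed anymore. A small demonstration; the host, port, and user are hypothetical:

import java.net.URI;
import java.net.URL;

public class DoubleEncodeSketch {
  public static void main(String[] args) throws Exception {
    String encodedPath = "/data/foo%20bar"; // already percent-encoded

    // The URI constructor re-quotes '%', yielding %2520: double encoding.
    URI uri = new URI("http", null, "nn.example.com", 50070,
        encodedPath, "ugi=alice", null);
    System.out.println(uri.toURL()); // .../data/foo%2520bar?ugi=alice

    // URL keeps the caller's encoding intact.
    URL url = new URL("http", "nn.example.com", 50070,
        encodedPath + '?' + "ugi=alice");
    System.out.println(url);         // .../data/foo%20bar?ugi=alice
  }
}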
/**
* ugi parameter for http connection
* Get encoded UGI parameter string for a URL.
*
* @return user_shortname,group1,group2...
*/
private String getUgiParameter() {
StringBuilder ugiParameter = new StringBuilder(ugi.getShortUserName());
private String getEncodedUgiParameter() {
StringBuilder ugiParameter = new StringBuilder(
ServletUtil.encodeQueryValue(ugi.getShortUserName()));
for(String g: ugi.getGroupNames()) {
ugiParameter.append(",");
ugiParameter.append(g);
ugiParameter.append(ServletUtil.encodeQueryValue(g));
}
return ugiParameter.toString();
}
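The ugi value joins the short user name and group names with commas, so each component has to be escaped individually; otherwise a name containing ',', '&', or '=' would change the meaning of the query string. A stand-in sketch using URLEncoder, on the assumption that ServletUtil.encodeQueryValue escapes at least those separators (URLEncoder also maps spaces to '+', which is legal in a query):

import java.io.UnsupportedEncodingException;
import java.net.URLEncoder;

public class UgiParamSketch {
  static String encodedUgiParam(String user, String[] groups)
      throws UnsupportedEncodingException {
    StringBuilder sb = new StringBuilder(URLEncoder.encode(user, "UTF-8"));
    for (String g : groups) {
      sb.append(',');                           // separator stays literal
      sb.append(URLEncoder.encode(g, "UTF-8")); // component is escaped
    }
    return sb.toString();
  }

  public static void main(String[] args) throws UnsupportedEncodingException {
    System.out.println(encodedUgiParam("alice", new String[]{"gro&up", "staff"}));
    // prints alice,gro%26up,staff -- the '&' cannot break the query
  }
}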
@ -304,7 +290,7 @@ public class HftpFileSystem extends FileSystem {
*/
protected HttpURLConnection openConnection(String path, String query)
throws IOException {
query = updateQuery(query);
query = addDelegationTokenParam(query);
final URL url = getNamenodeURL(path, query);
final HttpURLConnection connection = (HttpURLConnection)url.openConnection();
try {
@ -316,14 +302,14 @@ public class HftpFileSystem extends FileSystem {
return connection;
}
protected String updateQuery(String query) throws IOException {
protected String addDelegationTokenParam(String query) throws IOException {
String tokenString = null;
if (UserGroupInformation.isSecurityEnabled()) {
synchronized (this) {
if (delegationToken != null) {
tokenString = delegationToken.encodeToUrlString();
return (query + JspHelper.getDelegationTokenUrlParam(tokenString));
} // else we are talking to an insecure cluster
}
}
}
return query;
@ -331,9 +317,9 @@ public class HftpFileSystem extends FileSystem {
@Override
public FSDataInputStream open(Path f, int buffersize) throws IOException {
String query = "ugi=" + getUgiParameter();
query = updateQuery(query);
URL u = getNamenodeURL("/data" + f.toUri().getPath(), query);
String path = "/data" + ServletUtil.encodePath(f.toUri().getPath());
String query = addDelegationTokenParam("ugi=" + getEncodedUgiParameter());
URL u = getNamenodeURL(path, query);
return new FSDataInputStream(new ByteRangeInputStream(u));
}
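Putting the pieces together, a worked example of the request URL open() now produces for a name that needs encoding; the namenode address and ugi value are hypothetical:

import java.net.URI;
import java.net.URL;

public class OpenUrlSketch {
  public static void main(String[] args) throws Exception {
    String fsPath = "/foo bar";
    // Same effect as ServletUtil.encodePath in the code above (assumed).
    String encoded = new URI(null, null, fsPath, null).getRawPath();
    URL u = new URL("http", "nn.example.com", 50070,
        "/data" + encoded + '?' + "ugi=alice,staff");
    System.out.println(u);
    // http://nn.example.com:50070/data/foo%20bar?ugi=alice,staff
  }
}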
@ -382,9 +368,9 @@ public class HftpFileSystem extends FileSystem {
try {
XMLReader xr = XMLReaderFactory.createXMLReader();
xr.setContentHandler(this);
HttpURLConnection connection = openConnection("/listPaths" + path,
"ugi=" + getUgiParameter() + (recur? "&recursive=yes" : ""));
HttpURLConnection connection = openConnection(
"/listPaths" + ServletUtil.encodePath(path),
"ugi=" + getEncodedUgiParameter() + (recur ? "&recursive=yes" : ""));
InputStream resp = connection.getInputStream();
xr.parse(new InputSource(resp));
} catch(SAXException e) {
@ -447,7 +433,8 @@ public class HftpFileSystem extends FileSystem {
private FileChecksum getFileChecksum(String f) throws IOException {
final HttpURLConnection connection = openConnection(
"/fileChecksum" + f, "ugi=" + getUgiParameter());
"/fileChecksum" + ServletUtil.encodePath(f),
"ugi=" + getEncodedUgiParameter());
try {
final XMLReader xr = XMLReaderFactory.createXMLReader();
xr.setContentHandler(this);
@ -534,7 +521,8 @@ public class HftpFileSystem extends FileSystem {
*/
private ContentSummary getContentSummary(String path) throws IOException {
final HttpURLConnection connection = openConnection(
"/contentSummary" + path, "ugi=" + getUgiParameter());
"/contentSummary" + ServletUtil.encodePath(path),
"ugi=" + getEncodedUgiParameter());
InputStream in = null;
try {
in = connection.getInputStream();

View File

@ -123,42 +123,42 @@ public class HsftpFileSystem extends HftpFileSystem {
@Override
protected HttpURLConnection openConnection(String path, String query)
throws IOException {
query = addDelegationTokenParam(query);
final URL url = new URL("https", nnAddr.getHostName(),
nnAddr.getPort(), path + '?' + query);
HttpsURLConnection conn = (HttpsURLConnection)url.openConnection();
// bypass hostname verification
try {
query = updateQuery(query);
final URL url = new URI("https", null, nnAddr.getHostName(), nnAddr
.getPort(), path, query, null).toURL();
HttpsURLConnection conn = (HttpsURLConnection) url.openConnection();
// bypass hostname verification
conn.setHostnameVerifier(new DummyHostnameVerifier());
conn.setRequestMethod("GET");
conn.connect();
} catch (IOException ioe) {
throwIOExceptionFromConnection(conn, ioe);
}
// check cert expiration date
final int warnDays = ExpWarnDays;
if (warnDays > 0) { // make sure only check once
ExpWarnDays = 0;
long expTimeThreshold = warnDays * MM_SECONDS_PER_DAY
+ System.currentTimeMillis();
X509Certificate[] clientCerts = (X509Certificate[]) conn
.getLocalCertificates();
if (clientCerts != null) {
for (X509Certificate cert : clientCerts) {
long expTime = cert.getNotAfter().getTime();
if (expTime < expTimeThreshold) {
StringBuilder sb = new StringBuilder();
sb.append("\n Client certificate "
+ cert.getSubjectX500Principal().getName());
int dayOffSet = (int) ((expTime - System.currentTimeMillis()) / MM_SECONDS_PER_DAY);
sb.append(" have " + dayOffSet + " days to expire");
LOG.warn(sb.toString());
}
}
}
}
return (HttpURLConnection) conn;
} catch (URISyntaxException e) {
throw (IOException) new IOException().initCause(e);
}
return (HttpURLConnection) conn;
}
@Override

View File

@ -47,8 +47,8 @@ import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.ServletUtil;
import org.apache.hadoop.util.StringUtils;
import org.mortbay.util.URIUtil;
@InterfaceAudience.Private
public class DatanodeJspHelper {
@ -289,7 +289,7 @@ public class DatanodeJspHelper {
// Add the various links for looking at the file contents
// URL for downloading the full file
String downloadUrl = "http://" + req.getServerName() + ":"
+ req.getServerPort() + "/streamFile" + URIUtil.encodePath(filename)
+ req.getServerPort() + "/streamFile" + ServletUtil.encodePath(filename)
+ JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr, true)
+ JspHelper.getDelegationTokenUrlParam(tokenString);
out.print("<a name=\"viewOptions\"></a>");

View File

@ -31,6 +31,7 @@ import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ServletUtil;
import org.znerd.xmlenc.XMLOutputter;
/** Servlets for file checksum */
@ -49,8 +50,7 @@ public class ContentSummaryServlet extends DfsServlet {
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
final String path = request.getPathInfo();
final String path = ServletUtil.getDecodedPath(request, "/contentSummary");
final PrintWriter out = response.getWriter();
final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
xml.declaration();
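ServletUtil.getDecodedPath (and getRawPath, used by the servlets below) replaces request.getPathInfo() throughout. The point is control over decoding: getPathInfo() hands back a container-decoded path, which loses the original encoded form needed for redirects and, as the test comments later note, mangles names with ';' in them. A rough model of the contract, not the hadoop-common source:

import java.net.URI;
import java.net.URISyntaxException;
import javax.servlet.http.HttpServletRequest;

final class ServletPathSketch {
  // Still-encoded path after the servlet prefix, e.g. "/foo%20bar".
  static String getRawPath(HttpServletRequest request, String servletName) {
    return request.getRequestURI().substring(servletName.length());
  }

  // Decoded path: percent-escapes resolved, '+' left intact.
  static String getDecodedPath(HttpServletRequest request, String servletName)
      throws URISyntaxException {
    return new URI(getRawPath(request, servletName)).getPath();
  }
}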

View File

@ -19,8 +19,6 @@ package org.apache.hadoop.hdfs.server.namenode;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import javax.servlet.ServletContext;
import javax.servlet.http.HttpServlet;
@ -33,8 +31,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.UserGroupInformation;
@ -86,48 +82,6 @@ abstract class DfsServlet extends HttpServlet {
return DFSUtil.createNamenode(nnAddr, conf);
}
/** Create a URI for redirecting request to a datanode */
protected URI createRedirectUri(String servletpath,
UserGroupInformation ugi,
DatanodeID host,
HttpServletRequest request,
NameNode nn
) throws IOException, URISyntaxException {
final String hostname = host instanceof DatanodeInfo?
((DatanodeInfo)host).getHostName(): host.getHost();
final String scheme = request.getScheme();
final int port = "https".equals(scheme)?
(Integer)getServletContext().getAttribute("datanode.https.port")
: host.getInfoPort();
final String filename = request.getPathInfo();
StringBuilder params = new StringBuilder();
params.append("filename=");
params.append(filename);
if (UserGroupInformation.isSecurityEnabled()) {
String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
params.append(JspHelper.getDelegationTokenUrlParam(tokenString));
} else {
params.append("&ugi=");
params.append(ugi.getShortUserName());
}
// Add namenode address to the URL params
String nnAddr = NameNode.getHostPortString(nn.getNameNodeAddress());
params.append(JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, nnAddr));
return new URI(scheme, null, hostname, port, servletpath,
params.toString(), null);
}
/** Get filename from the request */
protected String getFilename(HttpServletRequest request,
HttpServletResponse response) throws IOException {
final String filename = request.getParameter("filename");
if (filename == null || filename.length() == 0) {
throw new IOException("Invalid filename");
}
return filename;
}
protected UserGroupInformation getUGI(HttpServletRequest request,
Configuration conf) throws IOException {
return JspHelper.getUGI(getServletContext(), request, conf);

View File

@ -21,6 +21,7 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import javax.net.SocketFactory;
import javax.servlet.ServletContext;
@ -36,11 +37,14 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.server.common.HdfsConstants;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ServletUtil;
import org.znerd.xmlenc.XMLOutputter;
/** Servlets for file checksum */
@ -52,6 +56,32 @@ public class FileChecksumServlets {
/** For java.io.Serializable */
private static final long serialVersionUID = 1L;
/** Create a redirection URL */
private URL createRedirectURL(UserGroupInformation ugi, DatanodeID host,
HttpServletRequest request, NameNode nn)
throws IOException, URISyntaxException {
final String hostname = host instanceof DatanodeInfo
? ((DatanodeInfo)host).getHostName() : host.getHost();
final String scheme = request.getScheme();
final int port = "https".equals(scheme)
? (Integer)getServletContext().getAttribute("datanode.https.port")
: host.getInfoPort();
final String encodedPath = ServletUtil.getRawPath(request, "/fileChecksum");
String dtParam = "";
if (UserGroupInformation.isSecurityEnabled()) {
String tokenString = ugi.getTokens().iterator().next().encodeToUrlString();
dtParam = JspHelper.getDelegationTokenUrlParam(tokenString);
}
String addr = NameNode.getHostPortString(nn.getNameNodeAddress());
String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
return new URL(scheme, hostname, port,
"/getFileChecksum" + encodedPath + '?' +
"ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) +
dtParam + addrParam);
}
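Note the redirect embeds encodedPath, taken from getRawPath, rather than a decoded path: the still-encoded form can go into the next URL verbatim, while a decoded form would have to be re-encoded (and a raw space or quote would make the Location header invalid). Sketch with a hypothetical datanode and user:

import java.net.MalformedURLException;
import java.net.URL;

public class RedirectSketch {
  public static void main(String[] args) throws MalformedURLException {
    String encodedPath = "/foo%20bar"; // from getRawPath(), still encoded
    URL redirect = new URL("http", "dn1.example.com", 50075,
        "/getFileChecksum" + encodedPath + "?ugi=alice");
    System.out.println(redirect);
    // http://dn1.example.com:50075/getFileChecksum/foo%20bar?ugi=alice
  }
}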
/** {@inheritDoc} */
public void doGet(HttpServletRequest request, HttpServletResponse response
) throws ServletException, IOException {
@ -62,12 +92,10 @@ public class FileChecksumServlets {
context);
final DatanodeID datanode = NamenodeJspHelper.getRandomDatanode(namenode);
try {
final URI uri = createRedirectUri("/getFileChecksum", ugi, datanode,
request, namenode);
response.sendRedirect(uri.toURL().toString());
response.sendRedirect(
createRedirectURL(ugi, datanode, request, namenode).toString());
} catch(URISyntaxException e) {
throw new ServletException(e);
//response.getWriter().println(e.toString());
} catch (IOException e) {
response.sendError(400, e.getMessage());
}
@ -84,7 +112,7 @@ public class FileChecksumServlets {
public void doGet(HttpServletRequest request, HttpServletResponse response
) throws ServletException, IOException {
final PrintWriter out = response.getWriter();
final String filename = getFilename(request, response);
final String path = ServletUtil.getDecodedPath(request, "/getFileChecksum");
final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
xml.declaration();
@ -103,12 +131,12 @@ public class FileChecksumServlets {
datanode, conf, getUGI(request, conf));
final ClientProtocol nnproxy = dfs.getNamenode();
final MD5MD5CRC32FileChecksum checksum = DFSClient.getFileChecksum(
filename, nnproxy, socketFactory, socketTimeout);
path, nnproxy, socketFactory, socketTimeout);
MD5MD5CRC32FileChecksum.write(xml, checksum);
} catch(IOException ioe) {
writeXml(ioe, filename, xml);
writeXml(ioe, path, xml);
} catch (InterruptedException e) {
writeXml(e, filename, xml);
writeXml(e, path, xml);
}
xml.endDocument();
}

View File

@ -18,8 +18,8 @@
package org.apache.hadoop.hdfs.server.namenode;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.security.PrivilegedExceptionAction;
import javax.servlet.http.HttpServletRequest;
@ -35,6 +35,7 @@ import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ServletUtil;
/** Redirect queries about the hosted filesystem to an appropriate datanode.
* @see org.apache.hadoop.hdfs.HftpFileSystem
@ -44,22 +45,25 @@ public class FileDataServlet extends DfsServlet {
/** For java.io.Serializable */
private static final long serialVersionUID = 1L;
/** Create a redirection URI */
protected URI createUri(String parent, HdfsFileStatus i, UserGroupInformation ugi,
ClientProtocol nnproxy, HttpServletRequest request, String dt)
/** Create a redirection URL */
private URL createRedirectURL(String path, String encodedPath, HdfsFileStatus status,
UserGroupInformation ugi, ClientProtocol nnproxy, HttpServletRequest request, String dt)
throws IOException, URISyntaxException {
String scheme = request.getScheme();
final LocatedBlocks blks = nnproxy.getBlockLocations(
i.getFullPath(new Path(parent)).toUri().getPath(), 0, 1);
final DatanodeID host = pickSrcDatanode(blks, i);
status.getFullPath(new Path(path)).toUri().getPath(), 0, 1);
final DatanodeID host = pickSrcDatanode(blks, status);
final String hostname;
if (host instanceof DatanodeInfo) {
hostname = ((DatanodeInfo)host).getHostName();
} else {
hostname = host.getHost();
}
String dtParam="";
final int port = "https".equals(scheme)
? (Integer)getServletContext().getAttribute("datanode.https.port")
: host.getInfoPort();
String dtParam = "";
if (dt != null) {
dtParam=JspHelper.getDelegationTokenUrlParam(dt);
}
@ -70,12 +74,10 @@ public class FileDataServlet extends DfsServlet {
String addr = NameNode.getHostPortString(nn.getNameNodeAddress());
String addrParam = JspHelper.getUrlParam(JspHelper.NAMENODE_ADDRESS, addr);
return new URI(scheme, null, hostname,
"https".equals(scheme)
? (Integer)getServletContext().getAttribute("datanode.https.port")
: host.getInfoPort(),
"/streamFile" + i.getFullName(parent),
"ugi=" + ugi.getShortUserName() + dtParam + addrParam, null);
return new URL(scheme, hostname, port,
"/streamFile" + encodedPath + '?' +
"ugi=" + ServletUtil.encodeQueryValue(ugi.getShortUserName()) +
dtParam + addrParam);
}
/** Select a datanode to service this request.
@ -112,17 +114,16 @@ public class FileDataServlet extends DfsServlet {
@Override
public Void run() throws IOException {
ClientProtocol nn = createNameNodeProxy();
final String path = request.getPathInfo() != null ? request
.getPathInfo() : "/";
final String path = ServletUtil.getDecodedPath(request, "/data");
final String encodedPath = ServletUtil.getRawPath(request, "/data");
String delegationToken = request
.getParameter(JspHelper.DELEGATION_PARAMETER_NAME);
HdfsFileStatus info = nn.getFileInfo(path);
if (info != null && !info.isDir()) {
try {
response.sendRedirect(createUri(path, info, ugi, nn, request,
delegationToken).toURL().toString());
response.sendRedirect(createRedirectURL(path, encodedPath,
info, ugi, nn, request, delegationToken).toString());
} catch (URISyntaxException e) {
response.getWriter().println(e.toString());
}

View File

@ -25,6 +25,7 @@ import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
import org.apache.hadoop.hdfs.protocol.DirectoryListing;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.util.ServletUtil;
import org.apache.hadoop.util.VersionInfo;
import org.znerd.xmlenc.*;
@ -86,8 +87,7 @@ public class ListPathsServlet extends DfsServlet {
*/
protected Map<String,String> buildRoot(HttpServletRequest request,
XMLOutputter doc) {
final String path = request.getPathInfo() != null
? request.getPathInfo() : "/";
final String path = ServletUtil.getDecodedPath(request, "/listPaths");
final String exclude = request.getParameter("exclude") != null
? request.getParameter("exclude") : "";
final String filter = request.getParameter("filter") != null
@ -135,6 +135,7 @@ public class ListPathsServlet extends DfsServlet {
final Map<String, String> root = buildRoot(request, doc);
final String path = root.get("path");
final String filePath = ServletUtil.getDecodedPath(request, "/listPaths");
try {
final boolean recur = "yes".equals(root.get("recursive"));
@ -153,7 +154,7 @@ public class ListPathsServlet extends DfsServlet {
doc.attribute(m.getKey(), m.getValue());
}
HdfsFileStatus base = nn.getFileInfo(path);
HdfsFileStatus base = nn.getFileInfo(filePath);
if ((base != null) && base.isDir()) {
writeInfo(base.getFullPath(new Path(path)), base, doc);
}

View File

@ -38,6 +38,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DatanodeJspHelper;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ServletUtil;
import org.mortbay.jetty.InclusiveByteRange;
@InterfaceAudience.Private
@ -57,13 +58,14 @@ public class StreamFile extends DfsServlet {
final DataNode datanode = (DataNode) context.getAttribute("datanode");
return DatanodeJspHelper.getDFSClient(request, datanode, conf, ugi);
}
@SuppressWarnings("unchecked")
public void doGet(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
final String path = request.getPathInfo() != null ?
request.getPathInfo() : "/";
final String path = ServletUtil.getDecodedPath(request, "/streamFile");
final String rawPath = ServletUtil.getRawPath(request, "/streamFile");
final String filename = JspHelper.validatePath(path);
final String rawFilename = JspHelper.validatePath(rawPath);
if (filename == null) {
response.setContentType("text/plain");
PrintWriter out = response.getWriter();
@ -98,7 +100,7 @@ public class StreamFile extends DfsServlet {
} else {
// No ranges, so send entire file
response.setHeader("Content-Disposition", "attachment; filename=\"" +
filename + "\"");
rawFilename + "\"");
response.setContentType("application/octet-stream");
response.setHeader(CONTENT_LENGTH, "" + fileLen);
StreamFile.copyFromOffset(in, out, 0L, fileLen);
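The Content-Disposition header now carries the raw (still-encoded) name. One reason, sketched below with an assumed encoding of the test path /foo">bar: the decoded form contains a double quote that would close the quoted filename early, while the percent-encoded form is plain ASCII:

public class DispositionSketch {
  public static void main(String[] args) {
    String rawFilename = "/foo%22%3Ebar"; // encoded form of /foo">bar
    System.out.println("attachment; filename=\"" + rawFilename + "\"");
    // attachment; filename="/foo%22%3Ebar" -- no stray quote inside
  }
}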

View File

@ -19,14 +19,15 @@
package org.apache.hadoop.hdfs;
import java.io.IOException;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.HttpURLConnection;
import java.util.Random;
import junit.extensions.TestSetup;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.junit.Test;
import org.junit.BeforeClass;
import org.junit.AfterClass;
import static org.junit.Assert.*;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
@ -39,26 +40,48 @@ import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils;
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.util.ServletUtil;
import org.apache.log4j.Level;
/**
* Unit test for HftpFileSystem.
*/
public class TestHftpFileSystem extends TestCase {
public class TestHftpFileSystem {
private static final Random RAN = new Random();
private static final Path TEST_FILE = new Path("/testfile+1");
private static Configuration config = null;
private static MiniDFSCluster cluster = null;
private static FileSystem hdfs = null;
private static HftpFileSystem hftpFs = null;
private static String blockPoolId = null;
/**
* Setup hadoop mini-cluster for test.
*/
private static void oneTimeSetUp() throws IOException {
private static Path[] TEST_PATHS = new Path[] {
// URI does not encode, Request#getPathInfo returns /foo
new Path("/foo;bar"),
// URI does not encode, Request#getPathInfo returns verbatim
new Path("/foo+"),
new Path("/foo+bar/foo+bar"),
new Path("/foo=bar/foo=bar"),
new Path("/foo,bar/foo,bar"),
new Path("/foo@bar/foo@bar"),
new Path("/foo&bar/foo&bar"),
new Path("/foo$bar/foo$bar"),
new Path("/foo_bar/foo_bar"),
new Path("/foo~bar/foo~bar"),
new Path("/foo.bar/foo.bar"),
new Path("/foo../bar/foo../bar"),
new Path("/foo.../bar/foo.../bar"),
new Path("/foo'bar/foo'bar"),
new Path("/foo#bar/foo#bar"),
new Path("/foo!bar/foo!bar"),
// HDFS file names may not contain ":"
// URI percent encodes, Request#getPathInfo decodes
new Path("/foo bar/foo bar"),
new Path("/foo?bar/foo?bar"),
new Path("/foo\">bar/foo\">bar"),
};
@BeforeClass
public static void setUp() throws IOException {
((Log4JLogger)HftpFileSystem.LOG).getLogger().setLevel(Level.ALL);
final long seed = RAN.nextLong();
@ -67,66 +90,73 @@ public class TestHftpFileSystem extends TestCase {
config = new Configuration();
config.set(DFSConfigKeys.DFS_DATANODE_HOST_NAME_KEY, "localhost");
cluster = new MiniDFSCluster.Builder(config).numDataNodes(2).build();
hdfs = cluster.getFileSystem();
blockPoolId = cluster.getNamesystem().getBlockPoolId();
final String hftpuri =
final String hftpUri =
"hftp://" + config.get(DFSConfigKeys.DFS_NAMENODE_HTTP_ADDRESS_KEY);
hftpFs = (HftpFileSystem) new Path(hftpuri).getFileSystem(config);
hftpFs = (HftpFileSystem) new Path(hftpUri).getFileSystem(config);
}
/**
* Shutdown the hadoop mini-cluster.
*/
private static void oneTimeTearDown() throws IOException {
@AfterClass
public static void tearDown() throws IOException {
hdfs.close();
hftpFs.close();
cluster.shutdown();
}
public TestHftpFileSystem(String name) {
super(name);
}
/**
* For one time setup / teardown.
* Test file creation and access with file names that need encoding.
*/
public static Test suite() {
TestSuite suite = new TestSuite();
suite.addTestSuite(TestHftpFileSystem.class);
return new TestSetup(suite) {
@Override
protected void setUp() throws IOException {
oneTimeSetUp();
}
@Override
protected void tearDown() throws IOException {
oneTimeTearDown();
}
};
}
public void testDataNodeRedirect() throws Exception {
if (hdfs.exists(TEST_FILE)) {
hdfs.delete(TEST_FILE, true);
@Test
public void testFileNameEncoding() throws IOException, URISyntaxException {
for (Path p : TEST_PATHS) {
// Create and access the path (data and streamFile servlets)
FSDataOutputStream out = hdfs.create(p, true);
out.writeBytes("0123456789");
out.close();
FSDataInputStream in = hftpFs.open(p);
assertEquals('0', in.read());
// Check the file status matches the path. Hftp returns a FileStatus
// with the entire URI, extract the path part.
assertEquals(p, new Path(hftpFs.getFileStatus(p).getPath().toUri().getPath()));
// Test list status (listPath servlet)
assertEquals(1, hftpFs.listStatus(p).length);
// Test content summary (contentSummary servlet)
assertNotNull("No content summary", hftpFs.getContentSummary(p));
// Test checksums (fileChecksum and getFileChecksum servlets)
assertNotNull("No file checksum", hftpFs.getFileChecksum(p));
}
FSDataOutputStream out = hdfs.create(TEST_FILE, (short) 1);
}
private void testDataNodeRedirect(Path path) throws IOException {
// Create the file
if (hdfs.exists(path)) {
hdfs.delete(path, true);
}
FSDataOutputStream out = hdfs.create(path, (short)1);
out.writeBytes("0123456789");
out.close();
// Get the path's block location so we can determine
// if we were redirected to the right DN.
BlockLocation[] locations =
hdfs.getFileBlockLocations(TEST_FILE, 0, 10);
hdfs.getFileBlockLocations(path, 0, 10);
String locationName = locations[0].getNames()[0];
URL u = hftpFs.getNamenodeFileURL(TEST_FILE);
// Connect to the NN to get redirected
URL u = hftpFs.getNamenodeURL(
"/data" + ServletUtil.encodePath(path.toUri().getPath()),
"ugi=userx,groupy");
HttpURLConnection conn = (HttpURLConnection)u.openConnection();
HttpURLConnection.setFollowRedirects(true);
conn.connect();
conn.getInputStream();
boolean checked = false;
// Find the datanode that has the block according to locations
// and check that the URL was redirected to this DN's info port
@ -138,19 +168,32 @@ public class TestHftpFileSystem extends TestCase {
assertEquals(dnR.getInfoPort(), conn.getURL().getPort());
}
}
assertTrue("The test never checked that location of " +
"the block and hftp desitnation are the same", checked);
assertTrue("The test never checked that location of " +
"the block and hftp desitnation are the same", checked);
}
/**
* Test that clients are redirected to the appropriate DN.
*/
@Test
public void testDataNodeRedirect() throws IOException {
for (Path p : TEST_PATHS) {
testDataNodeRedirect(p);
}
}
/**
* Tests getPos() functionality.
*/
public void testGetPos() throws Exception {
@Test
public void testGetPos() throws IOException {
final Path testFile = new Path("/testfile+1");
// Write a test file.
FSDataOutputStream out = hdfs.create(TEST_FILE, true);
FSDataOutputStream out = hdfs.create(testFile, true);
out.writeBytes("0123456789");
out.close();
FSDataInputStream in = hftpFs.open(TEST_FILE);
FSDataInputStream in = hftpFs.open(testFile);
// Test read().
for (int i = 0; i < 5; ++i) {
@ -175,17 +218,17 @@ public class TestHftpFileSystem extends TestCase {
assertEquals(10, in.getPos());
in.close();
}
/**
* Tests seek().
*/
public void testSeek() throws Exception {
// Write a test file.
FSDataOutputStream out = hdfs.create(TEST_FILE, true);
@Test
public void testSeek() throws IOException {
final Path testFile = new Path("/testfile+1");
FSDataOutputStream out = hdfs.create(testFile, true);
out.writeBytes("0123456789");
out.close();
FSDataInputStream in = hftpFs.open(TEST_FILE);
FSDataInputStream in = hftpFs.open(testFile);
in.seek(7);
assertEquals('7', in.read());
}

View File

@ -28,7 +28,6 @@ import java.net.URLEncoder;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.jsp.JspWriter;
import org.apache.commons.httpclient.util.URIUtil;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;
@ -36,6 +35,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.JspHelper;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.util.ServletUtil;
import org.junit.Test;
import org.mockito.Mockito;
@ -71,7 +71,7 @@ public class TestDatanodeJsp {
if (!doTail) {
assertTrue("page should show link to download file", viewFilePage
.contains("/streamFile" + URIUtil.encodePath(testPath.toString()) +
.contains("/streamFile" + ServletUtil.encodePath(testPath.toString()) +
"?nnaddr=localhost:" + nnIpcAddress.getPort()));
}
}
@ -90,7 +90,23 @@ public class TestDatanodeJsp {
testViewingFile(cluster, "/test-file", true);
testViewingFile(cluster, "/tmp/test-file", true);
testViewingFile(cluster, "/tmp/test-file%with goofy&characters", true);
testViewingFile(cluster, "/foo bar", true);
testViewingFile(cluster, "/foo+bar", true);
testViewingFile(cluster, "/foo;bar", true);
testViewingFile(cluster, "/foo=bar", true);
testViewingFile(cluster, "/foo,bar", true);
testViewingFile(cluster, "/foo?bar", true);
testViewingFile(cluster, "/foo\">bar", true);
testViewingFile(cluster, "/foo bar", false);
// See HDFS-2233
//testViewingFile(cluster, "/foo+bar", false);
//testViewingFile(cluster, "/foo;bar", false);
testViewingFile(cluster, "/foo=bar", false);
testViewingFile(cluster, "/foo,bar", false);
testViewingFile(cluster, "/foo?bar", false);
testViewingFile(cluster, "/foo\">bar", false);
} finally {
if (cluster != null) {
cluster.shutdown();

View File

@ -48,8 +48,8 @@ import org.mockito.Mockito;
import org.mortbay.jetty.InclusiveByteRange;
/*
Mock input stream class that always outputs the current position of the stream
*/
* Mock input stream class that always outputs the current position of the stream.
*/
class MockFSInputStream extends FSInputStream {
long currentPos = 0;
public int read() throws IOException {
@ -198,7 +198,7 @@ public class TestStreamFile {
}
// Test for positive scenario
@Test
public void testDoGetShouldWriteTheFileContentIntoServletOutputStream()
throws Exception {
@ -264,9 +264,11 @@ public class TestStreamFile {
Mockito.doReturn(CONF).when(mockServletContext).getAttribute(
JspHelper.CURRENT_CONF);
Mockito.doReturn(NameNode.getHostPortString(NameNode.getAddress(CONF)))
.when(mockHttpServletRequest).getParameter("nnaddr");
Mockito.doReturn(testFile.toString()).when(mockHttpServletRequest)
.getPathInfo();
Mockito.doReturn("/streamFile"+testFile.toString()).when(mockHttpServletRequest)
.getRequestURI();
}
static Path writeFile(FileSystem fs, Path f) throws IOException {