HDFS-14423. Percent (%) and plus (+) characters no longer work in WebHDFS.

Signed-off-by: Masatake Iwasaki <iwasakims@apache.org>
(cherry picked from commit da0006fe04)

Conflicts:
    hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

commit d7ca016d63
parent 88aece4083
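
The root cause, for context: WebHDFS decoded request paths with java.net.URLDecoder even though they had already been decoded once on arrival. URLDecoder implements application/x-www-form-urlencoded rules, under which a literal "+" becomes a space and a literal "%" must begin an escape sequence. A minimal sketch of the "+" half (the "%" half is shown after the WebHdfsHandler hunk below):

    import java.net.URLDecoder;

    public class PlusDecodeDemo {
      public static void main(String[] args) throws Exception {
        // URLDecoder follows form-encoding rules, so '+' decodes to a space.
        // Decoding an already-decoded path therefore corrupts real '+' signs.
        System.out.println(URLDecoder.decode("/a+b", "UTF-8")); // prints "/a b"
      }
    }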

HttpServer2.java
@@ -791,12 +791,27 @@ public final class HttpServer2 implements FilterContainer {
    */
   public void addJerseyResourcePackage(final String packageName,
       final String pathSpec) {
+    addJerseyResourcePackage(packageName, pathSpec,
+        Collections.<String, String>emptyMap());
+  }
+
+  /**
+   * Add a Jersey resource package.
+   * @param packageName The Java package name containing the Jersey resource.
+   * @param pathSpec The path spec for the servlet
+   * @param params properties and features for ResourceConfig
+   */
+  public void addJerseyResourcePackage(final String packageName,
+      final String pathSpec, Map<String, String> params) {
     LOG.info("addJerseyResourcePackage: packageName=" + packageName
         + ", pathSpec=" + pathSpec);
     final ServletHolder sh = new ServletHolder(ServletContainer.class);
     sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
         "com.sun.jersey.api.core.PackagesResourceConfig");
     sh.setInitParameter("com.sun.jersey.config.property.packages", packageName);
+    for (Map.Entry<String, String> entry : params.entrySet()) {
+      sh.setInitParameter(entry.getKey(), entry.getValue());
+    }
     webAppContext.addServlet(sh, pathSpec);
   }
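
The existing two-argument overload now delegates to a new three-argument form that accepts Jersey init parameters. A usage sketch, assuming an already-built HttpServer2 instance; the package name and path spec are hypothetical, while the feature key is the one the NameNodeHttpServer hunk below actually passes:

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.hadoop.http.HttpServer2;

    import com.sun.jersey.api.core.ResourceConfig;

    public class JerseyRegistrationSketch {
      static void register(HttpServer2 httpServer2) {
        Map<String, String> params = new HashMap<>();
        params.put(ResourceConfig.FEATURE_MATCH_MATRIX_PARAMS, "true");
        httpServer2.addJerseyResourcePackage(
            "org.example.rest.resources", // hypothetical resource package
            "/example/v1/*",              // hypothetical servlet path spec
            params);
      }
    }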

WebHdfsFileSystem.java
@@ -37,8 +37,6 @@ import java.net.InetSocketAddress;
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
-import java.net.URLDecoder;
-import java.net.URLEncoder;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;

@@ -147,8 +145,6 @@ public class WebHdfsFileSystem extends FileSystem
   public static final String EZ_HEADER = "X-Hadoop-Accept-EZ";
   public static final String FEFINFO_HEADER = "X-Hadoop-feInfo";
 
-  public static final String SPECIAL_FILENAME_CHARACTERS_REGEX = ".*[;+%].*";
-
   /**
    * Default connection factory may be overridden in tests to use smaller
    * timeout values

@@ -603,44 +599,8 @@ public class WebHdfsFileSystem extends FileSystem
       final Param<?,?>... parameters) throws IOException {
     //initialize URI path and query
 
-    Path encodedFSPath = fspath;
-    if (fspath != null) {
-      URI fspathUri = fspath.toUri();
-      String fspathUriDecoded = fspathUri.getPath();
-      boolean pathAlreadyEncoded = false;
-      try {
-        fspathUriDecoded = URLDecoder.decode(fspathUri.getPath(), "UTF-8");
-        //below condition check added as part of fixing HDFS-14323 to make
-        //sure pathAlreadyEncoded is not set in the case the input url does
-        //not have any encoded sequence already.This will help pulling data
-        //from 2.x hadoop cluster to 3.x using 3.x distcp client operation
-        if(!fspathUri.getPath().equals(fspathUriDecoded)) {
-          pathAlreadyEncoded = true;
-        }
-      } catch (IllegalArgumentException ex) {
-        LOG.trace("Cannot decode URL encoded file", ex);
-      }
-      String[] fspathItems = fspathUriDecoded.split("/");
-
-      if (fspathItems.length > 0) {
-        StringBuilder fsPathEncodedItems = new StringBuilder();
-        for (String fsPathItem : fspathItems) {
-          fsPathEncodedItems.append("/");
-          if (fsPathItem.matches(SPECIAL_FILENAME_CHARACTERS_REGEX) ||
-              pathAlreadyEncoded) {
-            fsPathEncodedItems.append(URLEncoder.encode(fsPathItem, "UTF-8"));
-          } else {
-            fsPathEncodedItems.append(fsPathItem);
-          }
-        }
-        encodedFSPath = new Path(fspathUri.getScheme(),
-            fspathUri.getAuthority(), fsPathEncodedItems.substring(1));
-      }
-    }
-
     final String path = PATH_PREFIX
-        + (encodedFSPath == null ? "/" :
-            makeQualified(encodedFSPath).toUri().getRawPath());
+        + (fspath == null? "/": makeQualified(fspath).toUri().getRawPath());
     final String query = op.toQueryString()
         + Param.toSortedString("&", getAuthParameters(op))
         + Param.toSortedString("&", parameters);
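
The rewritten toUrl drops the hand-rolled per-segment encoding and leans on java.net.URI instead: makeQualified(fspath).toUri() builds the URI through URI's multi-argument constructor, which quotes characters that are illegal in a path, and getRawPath() returns that quoted form. A small sketch of the behavior being relied on (host and port are assumptions):

    import java.net.URI;
    import java.net.URISyntaxException;

    public class RawPathDemo {
      public static void main(String[] args) throws URISyntaxException {
        // The multi-argument URI constructor quotes illegal path characters.
        URI u = new URI("webhdfs", "nn.example.com:9870", "/a%b", null, null);
        System.out.println(u.getPath());    // "/a%b"   (decoded view)
        System.out.println(u.getRawPath()); // "/a%25b" (quoted wire form)
      }
    }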

WebHdfsHandler.java
@@ -58,7 +58,6 @@ import java.io.OutputStream;
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.EnumSet;

@@ -128,7 +127,7 @@ public class WebHdfsHandler extends SimpleChannelInboundHandler<HttpRequest> {
     params = new ParameterParser(queryString, conf);
     DataNodeUGIProvider ugiProvider = new DataNodeUGIProvider(params);
     ugi = ugiProvider.ugi();
-    path = URLDecoder.decode(params.path(), "UTF-8");
+    path = params.path();
 
     injectToken();
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
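
This is the server half of the bug: as the change implies, params.path() is already decoded by ParameterParser, so the removed URLDecoder.decode call was a second decode. On a file literally named "a%b" that second pass misreads "%" as an escape introducer and fails outright. A short demonstration:

    import java.net.URLDecoder;

    public class PercentDecodeDemo {
      public static void main(String[] args) throws Exception {
        // One decode of the wire form is correct...
        System.out.println(URLDecoder.decode("/a%25b", "UTF-8")); // "/a%b"
        // ...but decoding the already-decoded result misreads '%' and fails.
        try {
          URLDecoder.decode("/a%b", "UTF-8");
        } catch (IllegalArgumentException e) {
          System.out.println("second decode fails: " + e.getMessage());
        }
      }
    }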

NameNodeHttpServer.java
@@ -51,6 +51,8 @@ import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 
+import com.sun.jersey.api.core.ResourceConfig;
+
 /**
  * Encapsulates the HTTP server started by the NameNode.
  */

@@ -112,9 +114,11 @@ public class NameNodeHttpServer {
     }
 
     // add webhdfs packages
+    final Map<String, String> resourceParams = new HashMap<>();
+    resourceParams.put(ResourceConfig.FEATURE_MATCH_MATRIX_PARAMS, "true");
     httpServer2.addJerseyResourcePackage(
         jerseyResourcePackage + ";" + Param.class.getPackage().getName(),
-        pathSpec);
+        pathSpec, resourceParams);
   }
 
   /**
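
FEATURE_MATCH_MATRIX_PARAMS addresses the ";" part of the bug: by default, JAX-RS routing treats everything after ";" in a path segment as matrix parameters rather than as part of the name, so a file called /a;b would match as /a. Setting the feature makes Jersey keep the matrix-parameter text in the matched path. A rough illustration of the default segment handling (plain Java, not Jersey internals):

    public class MatrixParamSketch {
      public static void main(String[] args) {
        // Default JAX-RS reading of the segment "a;b": the name is "a" and
        // "b" is a matrix parameter, so the ";b" tail vanishes from the path.
        String segment = "a;b";
        int semi = segment.indexOf(';');
        String matched = semi < 0 ? segment : segment.substring(0, semi);
        String matrix = semi < 0 ? "" : segment.substring(semi + 1);
        System.out.println(matched + " | " + matrix); // prints "a | b"
      }
    }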

NamenodeWebHdfsMethods.java
@@ -25,7 +25,6 @@ import java.io.PrintWriter;
 import java.net.InetAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 import java.net.UnknownHostException;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;

@@ -1060,9 +1059,7 @@ public class NamenodeWebHdfsMethods {
     return doAs(ugi, new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
-        String absolutePath = path.getAbsolutePath() == null ? null :
-            URLDecoder.decode(path.getAbsolutePath(), "UTF-8");
-        return get(ugi, delegation, username, doAsUser, absolutePath,
+        return get(ugi, delegation, username, doAsUser, path.getAbsolutePath(),
             op, offset, length, renewer, bufferSize, xattrNames, xattrEncoding,
             excludeDatanodes, fsAction, snapshotName, oldSnapshotName,
             tokenKind, tokenService, noredirect, startAfter);
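
Same pattern on the namenode: JAX-RS decodes @PathParam values by default, so path.getAbsolutePath() is already the decoded path, and the removed URLDecoder.decode call was the corrupting second decode; the handler now forwards the value untouched.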

TestWebHdfsUrl.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.WebHdfs;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;

@@ -76,7 +77,7 @@ public class TestWebHdfsUrl {
         uri, conf);
 
     // Construct a file path that contains percentage-encoded string
-    String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668";
+    String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668+";
     Path fsPath = new Path(pathName);
     URL encodedPathUrl = webhdfs.toUrl(PutOpParam.Op.CREATE, fsPath);
     // We should get back the original file path after cycling back and decoding

@@ -415,15 +416,11 @@ public class TestWebHdfsUrl {
   }
 
   private static final String BACKWARD_COMPATIBLE_SPECIAL_CHARACTER_FILENAME =
-      "specialFile ?\"\\()[]_-=&,{}#'`~!@$^*|<>.";
+      "specialFile ?\"\\()[]_-=&,{}#'`~!@$^*|<>.+%";
 
   @Test
   public void testWebHdfsBackwardCompatibleSpecialCharacterFile()
       throws Exception {
-
-    assertFalse(BACKWARD_COMPATIBLE_SPECIAL_CHARACTER_FILENAME
-        .matches(WebHdfsFileSystem.SPECIAL_FILENAME_CHARACTERS_REGEX));
 
     UserGroupInformation ugi =
         UserGroupInformation.createRemoteUser("test-user");
     ugi.setAuthenticationMethod(KERBEROS);

@@ -483,4 +480,33 @@ public class TestWebHdfsUrl {
     WebHdfsTestUtil.LOG.info(url.getPath());
     assertEquals(WebHdfsFileSystem.PATH_PREFIX + path, url.getPath());
   }
+
+  @Test
+  public void testWebHdfsPathWithSemicolon() throws Exception {
+    try (MiniDFSCluster cluster =
+        new MiniDFSCluster.Builder(WebHdfsTestUtil.createConf())
+            .numDataNodes(1)
+            .build()) {
+      cluster.waitActive();
+
+      // regression test for HDFS-14423.
+      final Path semicolon = new Path("/a;b");
+      final Path plus = new Path("/a+b");
+      final Path percent = new Path("/a%b");
+
+      final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(
+          cluster.getConfiguration(0), WebHdfs.SCHEME);
+      webhdfs.create(semicolon).close();
+      webhdfs.create(plus).close();
+      webhdfs.create(percent).close();
+
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+      assertEquals(semicolon.getName(),
+          dfs.getFileStatus(semicolon).getPath().getName());
+      assertEquals(plus.getName(),
+          dfs.getFileStatus(plus).getPath().getName());
+      assertEquals(percent.getName(),
+          dfs.getFileStatus(percent).getPath().getName());
+    }
+  }
 }
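
With both halves fixed, only "%" needs quoting on the wire; ";" and "+" are legal URI path characters and pass through untouched (which is exactly why the namenode needs the matrix-parameter feature above). A sketch of the resulting request URLs for the three test paths, assuming a hypothetical namenode address:

    import java.net.URI;

    public class WireUrlSketch {
      public static void main(String[] args) throws Exception {
        // "/webhdfs/v1" is the real WebHDFS prefix and GETFILESTATUS a real
        // op; the host and port are made up.
        for (String name : new String[] {"/a;b", "/a+b", "/a%b"}) {
          URI u = new URI("http", "nn.example.com:9870",
              "/webhdfs/v1" + name, "op=GETFILESTATUS", null);
          System.out.println(u.toASCIIString());
        }
        // http://nn.example.com:9870/webhdfs/v1/a;b?op=GETFILESTATUS
        // http://nn.example.com:9870/webhdfs/v1/a+b?op=GETFILESTATUS
        // http://nn.example.com:9870/webhdfs/v1/a%25b?op=GETFILESTATUS
      }
    }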