HDFS-14423. Percent (%) and plus (+) characters no longer work in WebHDFS.

Signed-off-by: Masatake Iwasaki <iwasakims@apache.org>
(cherry picked from commit da0006fe0473e353ee2d489156248a01aa982dfd)

 Conflicts:
	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

(cherry picked from commit d7ca016d63d89e5c8377a035f93485a7c77c3430)

 Conflicts:
	hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java
	hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
	hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java
Masatake Iwasaki 2019-08-12 12:07:16 +09:00
parent 99cd181a78
commit 670de354b3
6 changed files with 56 additions and 49 deletions

hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java

@@ -698,12 +698,27 @@ public void setAttribute(String name, Object value) {
    */
   public void addJerseyResourcePackage(final String packageName,
       final String pathSpec) {
+    addJerseyResourcePackage(packageName, pathSpec,
+        Collections.<String, String>emptyMap());
+  }
+
+  /**
+   * Add a Jersey resource package.
+   * @param packageName The Java package name containing the Jersey resource.
+   * @param pathSpec The path spec for the servlet
+   * @param params properties and features for ResourceConfig
+   */
+  public void addJerseyResourcePackage(final String packageName,
+      final String pathSpec, Map<String, String> params) {
     LOG.info("addJerseyResourcePackage: packageName=" + packageName
         + ", pathSpec=" + pathSpec);
     final ServletHolder sh = new ServletHolder(ServletContainer.class);
     sh.setInitParameter("com.sun.jersey.config.property.resourceConfigClass",
         "com.sun.jersey.api.core.PackagesResourceConfig");
     sh.setInitParameter("com.sun.jersey.config.property.packages", packageName);
+    for (Map.Entry<String, String> entry : params.entrySet()) {
+      sh.setInitParameter(entry.getKey(), entry.getValue());
+    }
     webAppContext.addServlet(sh, pathSpec);
   }
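
A note on the shape of this change: the new overload threads arbitrary Jersey 1.x init parameters through to the ServletContainer, so callers can toggle ResourceConfig features per endpoint. A minimal sketch of a call site, assuming an already-built HttpServer2 instance; the resource package and path spec here are illustrative, not from this commit:

import java.util.HashMap;
import java.util.Map;

import com.sun.jersey.api.core.ResourceConfig;
import org.apache.hadoop.http.HttpServer2;

public class JerseyParamsSketch {
  // "server" stands in for a configured HttpServer2 instance.
  static void register(HttpServer2 server) {
    Map<String, String> params = new HashMap<>();
    // Each map entry becomes a servlet init parameter, so any Jersey
    // ResourceConfig feature or property can be passed through.
    params.put(ResourceConfig.FEATURE_MATCH_MATRIX_PARAMS, "true");
    server.addJerseyResourcePackage(
        "org.example.rest.resources",  // hypothetical resource package
        "/example/*", params);
  }
}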

hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java

@@ -37,8 +37,6 @@
 import java.net.MalformedURLException;
 import java.net.URI;
 import java.net.URL;
-import java.net.URLDecoder;
-import java.net.URLEncoder;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
@@ -143,8 +141,6 @@ public class WebHdfsFileSystem extends FileSystem
   public static final String EZ_HEADER = "X-Hadoop-Accept-EZ";
   public static final String FEFINFO_HEADER = "X-Hadoop-feInfo";
-  public static final String SPECIAL_FILENAME_CHARACTERS_REGEX = ".*[;+%].*";
-
   /**
    * Default connection factory may be overridden in tests to use smaller
    * timeout values
@@ -609,38 +605,9 @@ Param<?,?>[] getAuthParameters(final HttpOpParam.Op op) throws IOException {
   URL toUrl(final HttpOpParam.Op op, final Path fspath,
       final Param<?,?>... parameters) throws IOException {
     //initialize URI path and query
-    Path encodedFSPath = fspath;
-    if (fspath != null) {
-      URI fspathUri = fspath.toUri();
-      String fspathUriDecoded = fspathUri.getPath();
-      boolean pathAlreadyEncoded = false;
-      try {
-        fspathUriDecoded = URLDecoder.decode(fspathUri.getPath(), "UTF-8");
-        pathAlreadyEncoded = true;
-      } catch (IllegalArgumentException ex) {
-        LOG.trace("Cannot decode URL encoded file", ex);
-      }
-      String[] fspathItems = fspathUriDecoded.split("/");
-      if (fspathItems.length > 0) {
-        StringBuilder fsPathEncodedItems = new StringBuilder();
-        for (String fsPathItem : fspathItems) {
-          fsPathEncodedItems.append("/");
-          if (fsPathItem.matches(SPECIAL_FILENAME_CHARACTERS_REGEX) ||
-              pathAlreadyEncoded) {
-            fsPathEncodedItems.append(URLEncoder.encode(fsPathItem, "UTF-8"));
-          } else {
-            fsPathEncodedItems.append(fsPathItem);
-          }
-        }
-        encodedFSPath = new Path(fspathUri.getScheme(),
-            fspathUri.getAuthority(), fsPathEncodedItems.substring(1));
-      }
-    }
     final String path = PATH_PREFIX
-        + (encodedFSPath == null ? "/" :
-        makeQualified(encodedFSPath).toUri().getRawPath());
+        + (fspath == null? "/": makeQualified(fspath).toUri().getRawPath());
     final String query = op.toQueryString()
         + Param.toSortedString("&", getAuthParameters(op))
         + Param.toSortedString("&", parameters);
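
The deleted block tried to guess whether a caller's path was already percent-encoded by attempting to decode it, then re-encoded every segment. A standalone sketch of why that guess misfires on a literal plus sign (plain JDK, nothing Hadoop-specific):

import java.net.URLDecoder;
import java.net.URLEncoder;

public class PlusSignDemo {
  public static void main(String[] args) throws Exception {
    // A file literally named "a+b": decoding "succeeds", so the removed
    // heuristic concluded the path was already encoded -- but the plus
    // has silently become a space.
    String guessed = URLDecoder.decode("a+b", "UTF-8");
    System.out.println(guessed);  // prints "a b"

    // Re-encoding then puts "a+b" on the wire, which the server side
    // (before this patch) decoded back to "a b": the name is lost.
    System.out.println(URLEncoder.encode(guessed, "UTF-8"));  // prints "a+b"
  }
}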

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/webhdfs/WebHdfsHandler.java

@@ -57,7 +57,6 @@
 import java.net.InetSocketAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 import java.nio.charset.StandardCharsets;
 import java.security.PrivilegedExceptionAction;
 import java.util.EnumSet;
@@ -127,7 +126,7 @@ public void channelRead0(final ChannelHandlerContext ctx,
     params = new ParameterParser(queryString, conf);
     DataNodeUGIProvider ugiProvider = new DataNodeUGIProvider(params);
     ugi = ugiProvider.ugi();
-    path = URLDecoder.decode(params.path(), "UTF-8");
+    path = params.path();
 
     injectToken();
     ugi.doAs(new PrivilegedExceptionAction<Void>() {
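
The handler now takes the path as given, because the HTTP stack hands it over already decoded; the removed URLDecoder pass decoded a second time. A minimal demonstration of both failure modes named in the commit title, again plain JDK:

import java.net.URLDecoder;

public class DoubleDecodeDemo {
  public static void main(String[] args) throws Exception {
    // In an already-decoded path, "+" is a literal plus, but a second
    // decode turns it into a space:
    System.out.println(URLDecoder.decode("/a+b", "UTF-8"));  // prints "/a b"

    // A literal "%" not followed by two hex digits is an invalid escape,
    // so a second decode throws instead of returning the name:
    try {
      URLDecoder.decode("/a%b", "UTF-8");
    } catch (IllegalArgumentException e) {
      System.out.println("rejected: " + e.getMessage());
    }
  }
}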

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeHttpServer.java

@@ -52,6 +52,8 @@
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.http.RestCsrfPreventionFilter;
 
+import com.sun.jersey.api.core.ResourceConfig;
+
 /**
  * Encapsulates the HTTP server started by the NameNode.
  */
@@ -114,9 +116,11 @@ public static void initWebHdfs(Configuration conf, String hostname,
     }
 
     // add webhdfs packages
+    final Map<String, String> resourceParams = new HashMap<>();
+    resourceParams.put(ResourceConfig.FEATURE_MATCH_MATRIX_PARAMS, "true");
     httpServer2.addJerseyResourcePackage(
-        jerseyResourcePackage+ ";" + Param.class.getPackage().getName(),
-        pathSpec);
+        jerseyResourcePackage + ";" + Param.class.getPackage().getName(),
+        pathSpec, resourceParams);
   }
 }
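
Why this setting matters: a semicolon in a URI path segment introduces RFC 3986 matrix parameters, and Jersey 1.x by default parses them off before template matching, so /webhdfs/v1/a;b would reach the resource with ";b" stripped. My understanding is that FEATURE_MATCH_MATRIX_PARAMS=true keeps the raw segment intact during matching. An illustrative JAX-RS resource, not code from this commit:

import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;

// Hypothetical resource: with Jersey 1.x defaults, GET /files/a;b binds
// name = "a" and treats ";b" as a matrix parameter; with
// FEATURE_MATCH_MATRIX_PARAMS=true the full segment "a;b" is preserved,
// matching WebHDFS's view of ';' as an ordinary filename character.
@Path("/files")
public class FilesResource {
  @GET
  @Path("{name:.*}")
  public String echo(@PathParam("name") String name) {
    return name;
  }
}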

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java

@@ -25,7 +25,6 @@
 import java.net.InetAddress;
 import java.net.URI;
 import java.net.URISyntaxException;
-import java.net.URLDecoder;
 import java.net.UnknownHostException;
 import java.security.Principal;
 import java.security.PrivilegedExceptionAction;
@@ -960,10 +959,8 @@ public Response get(
     return doAs(ugi, new PrivilegedExceptionAction<Response>() {
       @Override
       public Response run() throws IOException, URISyntaxException {
-        String absolutePath = path.getAbsolutePath() == null ? null :
-            URLDecoder.decode(path.getAbsolutePath(), "UTF-8");
-        return get(ugi, delegation, username, doAsUser, absolutePath,
-            op, offset, length, renewer, bufferSize,
+        return get(ugi, delegation, username, doAsUser,
+            path.getAbsolutePath(), op, offset, length, renewer, bufferSize,
             xattrNames, xattrEncoding, excludeDatanodes, fsAction, tokenKind,
             tokenService, noredirect, startAfter);
       }

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsUrl.java

@@ -38,6 +38,7 @@
 import org.apache.hadoop.fs.WebHdfs;
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.DFSTestUtil;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenSecretManager;
@@ -76,7 +77,7 @@ public void testEncodedPathUrl() throws IOException, URISyntaxException{
         uri, conf);
 
     // Construct a file path that contains percentage-encoded string
-    String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668";
+    String pathName = "/hdtest010%2C60020%2C1371000602151.1371058984668+";
     Path fsPath = new Path(pathName);
     URL encodedPathUrl = webhdfs.toUrl(PutOpParam.Op.CREATE, fsPath);
     // We should get back the original file path after cycling back and decoding
@@ -415,15 +416,11 @@ public void testWebHdfsSpecialCharacterFile() throws Exception {
   }
 
   private static final String BACKWARD_COMPATIBLE_SPECIAL_CHARACTER_FILENAME =
-      "specialFile ?\"\\()[]_-=&,{}#'`~!@$^*|<>.";
+      "specialFile ?\"\\()[]_-=&,{}#'`~!@$^*|<>.+%";
 
   @Test
   public void testWebHdfsBackwardCompatibleSpecialCharacterFile()
       throws Exception {
-    assertFalse(BACKWARD_COMPATIBLE_SPECIAL_CHARACTER_FILENAME
-        .matches(WebHdfsFileSystem.SPECIAL_FILENAME_CHARACTERS_REGEX));
-
     UserGroupInformation ugi =
         UserGroupInformation.createRemoteUser("test-user");
     ugi.setAuthenticationMethod(KERBEROS);
@@ -469,4 +466,32 @@ public void testWebHdfsBackwardCompatibleSpecialCharacterFile()
     }
   }
 
+  @Test
+  public void testWebHdfsPathWithSemicolon() throws Exception {
+    try (MiniDFSCluster cluster =
+        new MiniDFSCluster.Builder(WebHdfsTestUtil.createConf())
+            .numDataNodes(1)
+            .build()) {
+      cluster.waitActive();
+
+      // regression test for HDFS-14423.
+      final Path semicolon = new Path("/a;b");
+      final Path plus = new Path("/a+b");
+      final Path percent = new Path("/a%b");
+
+      final WebHdfsFileSystem webhdfs = WebHdfsTestUtil.getWebHdfsFileSystem(
+          cluster.getConfiguration(0), WebHdfs.SCHEME);
+      webhdfs.create(semicolon).close();
+      webhdfs.create(plus).close();
+      webhdfs.create(percent).close();
+
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+      assertEquals(semicolon.getName(),
+          dfs.getFileStatus(semicolon).getPath().getName());
+      assertEquals(plus.getName(),
+          dfs.getFileStatus(plus).getPath().getName());
+      assertEquals(percent.getName(),
+          dfs.getFileStatus(percent).getPath().getName());
+    }
+  }
 }