HDFS-2356. Support case insensitive query parameter names in webhdfs.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1175113 13f79535-47bb-0310-9956-ffa450edef68
parent 7e0a224a36
commit 83a83d3b73
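In brief: the commit adds a Jersey ResourceFilter (ParamFilter, new file below) that rewrites each request URI so every query parameter name is lower case before JAX-RS binds it, and it changes all declared parameter NAME constants to lower case to match. A minimal standalone sketch of that normalization follows; the class and method names here are illustrative, not part of this commit:

import java.util.LinkedHashMap;
import java.util.Map;

// Hypothetical demo of the normalization ParamFilter applies to the request
// URI: parameter names are lowercased, parameter values are left untouched.
public class LowerCaseQueryDemo {
  static Map<String, String> parse(final String query) {
    final Map<String, String> lower = new LinkedHashMap<String, String>();
    for (String pair : query.split("&")) {
      final int eq = pair.indexOf('=');
      final String name = eq < 0 ? pair : pair.substring(0, eq);
      final String value = eq < 0 ? "" : pair.substring(eq + 1);
      lower.put(name.toLowerCase(), value); // name lowercased, value kept
    }
    return lower;
  }

  public static void main(String[] args) {
    // Both spellings of the name collapse to op=mkDIrs; the mixed-case
    // *value* is accepted too, as the new test at the bottom demonstrates.
    System.out.println(parse("Op=mkDIrs&Permission=755"));
    System.out.println(parse("op=mkDIrs&permission=755"));
  }
}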
CHANGES.txt
@@ -38,7 +38,11 @@ Trunk (unreleased changes)
     not use ArrayWritable for writing non-array items. (Uma Maheswara Rao G
     via szetszwo)
 
-    HDFS-2351 Change Namenode and Datanode to register each of their protocols seperately (Sanjay Radia)
+    HDFS-2351 Change Namenode and Datanode to register each of their protocols
+    seperately (Sanjay Radia)
+
+    HDFS-2356. Support case insensitive query parameter names in webhdfs.
+    (szetszwo)
 
   BUG FIXES
     HDFS-2287. TestParallelRead has a small off-by-one bug. (todd)
JspHelper.java
@@ -54,6 +54,7 @@ import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
 import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor;
 import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer;
+import org.apache.hadoop.hdfs.web.resources.DelegationParam;
 import org.apache.hadoop.hdfs.web.resources.UserParam;
 import org.apache.hadoop.http.HtmlQuoting;
 import org.apache.hadoop.io.Text;
@@ -68,7 +69,7 @@ import org.apache.hadoop.util.VersionInfo;
 public class JspHelper {
   public static final String CURRENT_CONF = "current.conf";
   final static public String WEB_UGI_PROPERTY_NAME = DFSConfigKeys.DFS_WEB_UGI_KEY;
-  public static final String DELEGATION_PARAMETER_NAME = "delegation";
+  public static final String DELEGATION_PARAMETER_NAME = DelegationParam.NAME;
   public static final String NAMENODE_ADDRESS = "nnaddr";
   static final String SET_DELEGATION = "&" + DELEGATION_PARAMETER_NAME +
       "=";
DatanodeWebHdfsMethods.java
@@ -50,6 +50,7 @@ import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.hdfs.web.ParamFilter;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
 import org.apache.hadoop.hdfs.web.resources.BufferSizeParam;
@@ -66,8 +67,11 @@ import org.apache.hadoop.hdfs.web.resources.UriFsPathParam;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 
+import com.sun.jersey.spi.container.ResourceFilters;
+
 /** Web-hdfs DataNode implementation. */
 @Path("")
+@ResourceFilters(ParamFilter.class)
 public class DatanodeWebHdfsMethods {
   public static final Log LOG = LogFactory.getLog(DatanodeWebHdfsMethods.class);
 
NamenodeWebHdfsMethods.java
@@ -57,6 +57,7 @@ import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.hdfs.web.JsonUtil;
+import org.apache.hadoop.hdfs.web.ParamFilter;
 import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;
 import org.apache.hadoop.hdfs.web.resources.AccessTimeParam;
 import org.apache.hadoop.hdfs.web.resources.BlockSizeParam;
@@ -90,8 +91,11 @@ import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 
+import com.sun.jersey.spi.container.ResourceFilters;
+
 /** Web-hdfs NameNode implementation. */
 @Path("")
+@ResourceFilters(ParamFilter.class)
 public class NamenodeWebHdfsMethods {
   public static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class);
 
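Note: annotating the two resource classes with @ResourceFilters(ParamFilter.class) is, in Jersey 1.x, enough to run the filter for every resource method they declare, so each incoming WebHDFS request URI is normalized once, before any @QueryParam binding happens.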
ParamFilter.java (new file)
@@ -0,0 +1,85 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.web;
+
+import java.net.URI;
+import java.util.List;
+import java.util.Map;
+
+import javax.ws.rs.core.MultivaluedMap;
+import javax.ws.rs.core.UriBuilder;
+
+import com.sun.jersey.spi.container.ContainerRequest;
+import com.sun.jersey.spi.container.ContainerRequestFilter;
+import com.sun.jersey.spi.container.ContainerResponseFilter;
+import com.sun.jersey.spi.container.ResourceFilter;
+
+/**
+ * A filter to change parameter names to lower cases
+ * so that parameter names are considered as case insensitive.
+ */
+public class ParamFilter implements ResourceFilter {
+  private static final ContainerRequestFilter LOWER_CASE
+      = new ContainerRequestFilter() {
+    @Override
+    public ContainerRequest filter(final ContainerRequest request) {
+      final MultivaluedMap<String, String> parameters = request.getQueryParameters();
+      if (containsUpperCase(parameters.keySet())) {
+        //rebuild URI
+        final URI lower = rebuildQuery(request.getRequestUri(), parameters);
+        request.setUris(request.getBaseUri(), lower);
+      }
+      return request;
+    }
+  };
+
+  @Override
+  public ContainerRequestFilter getRequestFilter() {
+    return LOWER_CASE;
+  }
+
+  @Override
+  public ContainerResponseFilter getResponseFilter() {
+    return null;
+  }
+
+  /** Do the strings contain upper case letters? */
+  private static boolean containsUpperCase(final Iterable<String> strings) {
+    for(String s : strings) {
+      for(int i = 0; i < s.length(); i++) {
+        if (Character.isUpperCase(s.charAt(i))) {
+          return true;
+        }
+      }
+    }
+    return false;
+  }
+
+  /** Rebuild the URI query with lower case parameter names. */
+  private static URI rebuildQuery(final URI uri,
+      final MultivaluedMap<String, String> parameters) {
+    UriBuilder b = UriBuilder.fromUri(uri).replaceQuery("");
+    for(Map.Entry<String, List<String>> e : parameters.entrySet()) {
+      final String key = e.getKey().toLowerCase();
+      for(String v : e.getValue()) {
+        b = b.queryParam(key, v);
+      }
+    }
+    return b.build();
+  }
+}
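ParamFilter only needs to act on requests, which is why getRequestFilter() returns the lowercasing filter while getResponseFilter() returns null; the rewritten URI is handed back through ContainerRequest.setUris() with the base URI unchanged. Below is a rough sketch of the same query-rebuilding idea against plain java.net.URI, runnable without a Jersey container. The class name, method name, and example host/port are illustrative, and unlike the real rebuildQuery it handles only a single non-empty query string rather than a MultivaluedMap of repeated parameters:

import java.net.URI;

public class RebuildQueryDemo {
  // Lowercase each parameter name; keep "=value" exactly as sent.
  static URI lowerCaseNames(final URI uri) {
    final StringBuilder q = new StringBuilder();
    for (String pair : uri.getRawQuery().split("&")) {
      final int eq = pair.indexOf('=');
      final String name = eq < 0 ? pair : pair.substring(0, eq);
      if (q.length() > 0) {
        q.append('&');
      }
      q.append(name.toLowerCase());
      if (eq >= 0) {
        q.append(pair.substring(eq));
      }
    }
    return URI.create(uri.getScheme() + "://" + uri.getRawAuthority()
        + uri.getRawPath() + "?" + q);
  }

  public static void main(String[] args) {
    final URI mixed = URI.create(
        "http://namenode:50070/webhdfs/v1/test?Op=mkDIrs&Permission=755");
    // Prints http://namenode:50070/webhdfs/v1/test?op=mkDIrs&permission=755
    System.out.println(lowerCaseNames(mixed));
  }
}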
WebHdfsFileSystem.java
@@ -172,7 +172,7 @@ public class WebHdfsFileSystem extends HftpFileSystem {
     }
   }
 
-  private URL toUrl(final HttpOpParam.Op op, final Path fspath,
+  URL toUrl(final HttpOpParam.Op op, final Path fspath,
       final Param<?,?>... parameters) throws IOException {
     //initialize URI path and query
     final String path = "/" + PATH_PREFIX
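The only change above is visibility: toUrl(...) drops private so that the new testCaseInsensitive() at the bottom, which lives in the same org.apache.hadoop.hdfs.web package, can obtain the canonical URL before rewriting its query string.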
AccessTimeParam.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hdfs.web.resources;
 /** Access time parameter. */
 public class AccessTimeParam extends LongParam {
   /** Parameter name. */
-  public static final String NAME = "accessTime";
+  public static final String NAME = "accesstime";
   /** Default parameter value. */
   public static final String DEFAULT = "-1";
 
BlockSizeParam.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration;
 /** Block size parameter. */
 public class BlockSizeParam extends LongParam {
   /** Parameter name. */
-  public static final String NAME = "blockSize";
+  public static final String NAME = "blocksize";
   /** Default parameter value. */
   public static final String DEFAULT = NULL;
 
BufferSizeParam.java
@@ -23,7 +23,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 /** Buffer size parameter. */
 public class BufferSizeParam extends IntegerParam {
   /** Parameter name. */
-  public static final String NAME = "bufferSize";
+  public static final String NAME = "buffersize";
   /** Default parameter value. */
   public static final String DEFAULT = NULL;
 
DelegationParam.java
@@ -17,13 +17,12 @@
  */
 package org.apache.hadoop.hdfs.web.resources;
 
-import org.apache.hadoop.hdfs.server.common.JspHelper;
 import org.apache.hadoop.security.UserGroupInformation;
 
 /** Delegation token parameter. */
 public class DelegationParam extends StringParam {
   /** Parameter name. */
-  public static final String NAME = JspHelper.DELEGATION_PARAMETER_NAME;
+  public static final String NAME = "delegation";
   /** Default parameter value. */
   public static final String DEFAULT = "";
 
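Note the inverted dependency: DelegationParam now owns the literal "delegation", and JspHelper.DELEGATION_PARAMETER_NAME (see the JspHelper hunk above) is redefined as DelegationParam.NAME, which lets this class drop its import of org.apache.hadoop.hdfs.server.common.JspHelper.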
DeleteOpParam.java
@@ -21,9 +21,6 @@ import java.net.HttpURLConnection;
 
 /** Http DELETE operation parameter. */
 public class DeleteOpParam extends HttpOpParam<DeleteOpParam.Op> {
-  /** Parameter name. */
-  public static final String NAME = "deleteOp";
-
   /** Delete operations. */
   public static enum Op implements HttpOpParam.Op {
     DELETE(HttpURLConnection.HTTP_OK),
DstPathParam.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.fs.Path;
 /** Destination path parameter. */
 public class DstPathParam extends StringParam {
   /** Parameter name. */
-  public static final String NAME = "dstPath";
+  public static final String NAME = "dstpath";
   /** Default parameter value. */
   public static final String DEFAULT = "";
 
GetOpParam.java
@@ -21,9 +21,6 @@ import java.net.HttpURLConnection;
 
 /** Http GET operation parameter. */
 public class GetOpParam extends HttpOpParam<GetOpParam.Op> {
-  /** Parameter name. */
-  public static final String NAME = "getOp";
-
   /** Get operations. */
   public static enum Op implements HttpOpParam.Op {
     OPEN(HttpURLConnection.HTTP_OK),
HttpOpParam.java
@@ -20,6 +20,9 @@ package org.apache.hadoop.hdfs.web.resources;
 /** Http operation parameter. */
 public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op>
     extends EnumParam<E> {
+  /** Parameter name. */
+  public static final String NAME = "op";
+
   /** Default parameter value. */
   public static final String DEFAULT = NULL;
 
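With NAME = "op" declared once on the shared base class, the per-verb parameter names (deleteOp and getOp, removed above; postOp and putOp, removed below) collapse into a single lowercase op query parameter used by all four HTTP methods, e.g. a mkdirs request carries ?op=MKDIRS on a PUT.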
ModificationTimeParam.java
@@ -20,7 +20,7 @@ package org.apache.hadoop.hdfs.web.resources;
 /** Modification time parameter. */
 public class ModificationTimeParam extends LongParam {
   /** Parameter name. */
-  public static final String NAME = "modificationTime";
+  public static final String NAME = "modificationtime";
   /** Default parameter value. */
   public static final String DEFAULT = "-1";
 
PostOpParam.java
@@ -21,9 +21,6 @@ import java.net.HttpURLConnection;
 
 /** Http POST operation parameter. */
 public class PostOpParam extends HttpOpParam<PostOpParam.Op> {
-  /** Parameter name. */
-  public static final String NAME = "postOp";
-
   /** Post operations. */
   public static enum Op implements HttpOpParam.Op {
     APPEND(HttpURLConnection.HTTP_OK),
PutOpParam.java
@@ -21,9 +21,6 @@ import java.net.HttpURLConnection;
 
 /** Http POST operation parameter. */
 public class PutOpParam extends HttpOpParam<PutOpParam.Op> {
-  /** Parameter name. */
-  public static final String NAME = "putOp";
-
   /** Put operations. */
   public static enum Op implements HttpOpParam.Op {
     CREATE(true, HttpURLConnection.HTTP_CREATED),
RenameOptionSetParam.java
@@ -22,7 +22,7 @@ import org.apache.hadoop.fs.Options;
 /** Rename option set parameter. */
 public class RenameOptionSetParam extends EnumSetParam<Options.Rename> {
   /** Parameter name. */
-  public static final String NAME = "renameOptions";
+  public static final String NAME = "renameoptions";
   /** Default parameter value. */
   public static final String DEFAULT = "";
 
TestWebHdfsFileSystemContract.java
@@ -18,8 +18,12 @@
 
 package org.apache.hadoop.hdfs.web;
 
+import java.io.BufferedReader;
 import java.io.IOException;
+import java.io.InputStreamReader;
+import java.net.HttpURLConnection;
 import java.net.URI;
+import java.net.URL;
 import java.security.PrivilegedExceptionAction;
 
 import org.apache.hadoop.conf.Configuration;
@@ -30,6 +34,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.web.resources.PutOpParam;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
 
@@ -127,4 +132,30 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
       assertEquals(expected[i].toString(), computed[i].toString());
     }
   }
+
+  public void testCaseInsensitive() throws IOException {
+    final Path p = new Path("/test/testCaseInsensitive");
+    final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs;
+    final PutOpParam.Op op = PutOpParam.Op.MKDIRS;
+
+    //replace query with mix case letters
+    final URL url = webhdfs.toUrl(op, p);
+    WebHdfsFileSystem.LOG.info("url = " + url);
+    final URL replaced = new URL(url.toString().replace(op.toQueryString(),
+        "Op=mkDIrs"));
+    WebHdfsFileSystem.LOG.info("replaced = " + replaced);
+
+    //connect with the replaced URL.
+    final HttpURLConnection conn = (HttpURLConnection)replaced.openConnection();
+    conn.setRequestMethod(op.getType().toString());
+    conn.connect();
+    final BufferedReader in = new BufferedReader(new InputStreamReader(
+        conn.getInputStream()));
+    for(String line; (line = in.readLine()) != null; ) {
+      WebHdfsFileSystem.LOG.info("> " + line);
+    }
+
+    //check if the command successes.
+    assertTrue(fs.getFileStatus(p).isDirectory());
+  }
 }
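The test obtains the canonical URL from the now package-private toUrl(...), rewrites its query to the mixed-case Op=mkDIrs, issues the PUT over a raw HttpURLConnection against the MiniDFSCluster, and finally asserts via getFileStatus that the directory really was created; that assertion passes only if both the parameter name and the operation value were matched case-insensitively.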