diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt index 9ced9e42bb1..3dccb99b66e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt @@ -38,7 +38,11 @@ Trunk (unreleased changes) not use ArrayWritable for writing non-array items. (Uma Maheswara Rao G via szetszwo) - HDFS-2351 Change Namenode and Datanode to register each of their protocols seperately (Sanjay Radia) + HDFS-2351 Change Namenode and Datanode to register each of their protocols + seperately (Sanjay Radia) + + HDFS-2356. Support case insensitive query parameter names in webhdfs. + (szetszwo) BUG FIXES HDFS-2287. TestParallelRead has a small off-by-one bug. (todd) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java index e2ce26df6b9..82ec3bd7711 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/common/JspHelper.java @@ -54,6 +54,7 @@ import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeDescriptor; import org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer; +import org.apache.hadoop.hdfs.web.resources.DelegationParam; import org.apache.hadoop.hdfs.web.resources.UserParam; import org.apache.hadoop.http.HtmlQuoting; import org.apache.hadoop.io.Text; @@ -68,7 +69,7 @@ import org.apache.hadoop.util.VersionInfo; public class JspHelper { public static final String CURRENT_CONF = "current.conf"; final static public String WEB_UGI_PROPERTY_NAME = DFSConfigKeys.DFS_WEB_UGI_KEY; - public static final String DELEGATION_PARAMETER_NAME = 
"delegation"; + public static final String DELEGATION_PARAMETER_NAME = DelegationParam.NAME; public static final String NAMENODE_ADDRESS = "nnaddr"; static final String SET_DELEGATION = "&" + DELEGATION_PARAMETER_NAME + "="; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java index 4c5c61aac7c..0305024e4f0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java @@ -50,6 +50,7 @@ import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.DFSClient.DFSDataInputStream; import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.namenode.NameNode; +import org.apache.hadoop.hdfs.web.ParamFilter; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.resources.BlockSizeParam; import org.apache.hadoop.hdfs.web.resources.BufferSizeParam; @@ -66,8 +67,11 @@ import org.apache.hadoop.hdfs.web.resources.UriFsPathParam; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.security.UserGroupInformation; +import com.sun.jersey.spi.container.ResourceFilters; + /** Web-hdfs DataNode implementation. 
*/ @Path("") +@ResourceFilters(ParamFilter.class) public class DatanodeWebHdfsMethods { public static final Log LOG = LogFactory.getLog(DatanodeWebHdfsMethods.class); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java index c72437faf17..2dd1db33410 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java @@ -57,6 +57,7 @@ import org.apache.hadoop.hdfs.server.common.JspHelper; import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols; import org.apache.hadoop.hdfs.web.JsonUtil; +import org.apache.hadoop.hdfs.web.ParamFilter; import org.apache.hadoop.hdfs.web.WebHdfsFileSystem; import org.apache.hadoop.hdfs.web.resources.AccessTimeParam; import org.apache.hadoop.hdfs.web.resources.BlockSizeParam; @@ -90,8 +91,11 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; +import com.sun.jersey.spi.container.ResourceFilters; + /** Web-hdfs NameNode implementation. 
*/ @Path("") +@ResourceFilters(ParamFilter.class) public class NamenodeWebHdfsMethods { public static final Log LOG = LogFactory.getLog(NamenodeWebHdfsMethods.class); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java new file mode 100644 index 00000000000..687b8747673 --- /dev/null +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/ParamFilter.java @@ -0,0 +1,85 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.hdfs.web; + +import java.net.URI; +import java.util.List; +import java.util.Map; + +import javax.ws.rs.core.MultivaluedMap; +import javax.ws.rs.core.UriBuilder; + +import com.sun.jersey.spi.container.ContainerRequest; +import com.sun.jersey.spi.container.ContainerRequestFilter; +import com.sun.jersey.spi.container.ContainerResponseFilter; +import com.sun.jersey.spi.container.ResourceFilter; + +/** + * A filter to change parameter names to lower cases + * so that parameter names are considered as case insensitive. 
+ */ +public class ParamFilter implements ResourceFilter { + private static final ContainerRequestFilter LOWER_CASE + = new ContainerRequestFilter() { + @Override + public ContainerRequest filter(final ContainerRequest request) { + final MultivaluedMap<String, String> parameters = request.getQueryParameters(); + if (containsUpperCase(parameters.keySet())) { + //rebuild URI + final URI lower = rebuildQuery(request.getRequestUri(), parameters); + request.setUris(request.getBaseUri(), lower); + } + return request; + } + }; + + @Override + public ContainerRequestFilter getRequestFilter() { + return LOWER_CASE; + } + + @Override + public ContainerResponseFilter getResponseFilter() { + return null; + } + + /** Do the strings contain upper case letters? */ + private static boolean containsUpperCase(final Iterable<String> strings) { + for(String s : strings) { + for(int i = 0; i < s.length(); i++) { + if (Character.isUpperCase(s.charAt(i))) { + return true; + } + } + } + return false; + } + + /** Rebuild the URI query with lower case parameter names.
+ */ + private static URI rebuildQuery(final URI uri, + final MultivaluedMap<String, String> parameters) { + UriBuilder b = UriBuilder.fromUri(uri).replaceQuery(""); + for(Map.Entry<String, List<String>> e : parameters.entrySet()) { + final String key = e.getKey().toLowerCase(); + for(String v : e.getValue()) { + b = b.queryParam(key, v); + } + } + return b.build(); + } +} \ No newline at end of file diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java index 061d44bbe4f..b2b1fac75e7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java @@ -172,7 +172,7 @@ public class WebHdfsFileSystem extends HftpFileSystem { } } - private URL toUrl(final HttpOpParam.Op op, final Path fspath, + URL toUrl(final HttpOpParam.Op op, final Path fspath, final Param<?,?>... parameters) throws IOException { //initialize URI path and query final String path = "/" + PATH_PREFIX diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java index 830e5cd32dd..8d82131c703 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/AccessTimeParam.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hdfs.web.resources; /** Access time parameter. */ public class AccessTimeParam extends LongParam { /** Parameter name. */ - public static final String NAME = "accessTime"; + public static final String NAME = "accesstime"; /** Default parameter value.
*/ public static final String DEFAULT = "-1"; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java index e50b282f33b..96114968074 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BlockSizeParam.java @@ -25,7 +25,7 @@ import org.apache.hadoop.conf.Configuration; /** Block size parameter. */ public class BlockSizeParam extends LongParam { /** Parameter name. */ - public static final String NAME = "blockSize"; + public static final String NAME = "blocksize"; /** Default parameter value. */ public static final String DEFAULT = NULL; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java index 424e5ba2533..148834b1024 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/BufferSizeParam.java @@ -23,7 +23,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeysPublic; /** Buffer size parameter. */ public class BufferSizeParam extends IntegerParam { /** Parameter name. */ - public static final String NAME = "bufferSize"; + public static final String NAME = "buffersize"; /** Default parameter value. 
*/ public static final String DEFAULT = NULL; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java index 80f0c4b0b33..ad08773ea24 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DelegationParam.java @@ -17,13 +17,12 @@ */ package org.apache.hadoop.hdfs.web.resources; -import org.apache.hadoop.hdfs.server.common.JspHelper; import org.apache.hadoop.security.UserGroupInformation; /** Delegation token parameter. */ public class DelegationParam extends StringParam { /** Parameter name. */ - public static final String NAME = JspHelper.DELEGATION_PARAMETER_NAME; + public static final String NAME = "delegation"; /** Default parameter value. */ public static final String DEFAULT = ""; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java index e61e858ee49..12962b4a4ee 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DeleteOpParam.java @@ -21,9 +21,6 @@ import java.net.HttpURLConnection; /** Http DELETE operation parameter. */ public class DeleteOpParam extends HttpOpParam { - /** Parameter name. */ - public static final String NAME = "deleteOp"; - /** Delete operations. 
*/ public static enum Op implements HttpOpParam.Op { DELETE(HttpURLConnection.HTTP_OK), diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java index 7d522a38770..5fa52456f92 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/DstPathParam.java @@ -22,7 +22,7 @@ import org.apache.hadoop.fs.Path; /** Destination path parameter. */ public class DstPathParam extends StringParam { /** Parameter name. */ - public static final String NAME = "dstPath"; + public static final String NAME = "dstpath"; /** Default parameter value. */ public static final String DEFAULT = ""; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java index aeb3135404c..d547f1b1b4d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java @@ -21,9 +21,6 @@ import java.net.HttpURLConnection; /** Http GET operation parameter. */ public class GetOpParam extends HttpOpParam { - /** Parameter name. */ - public static final String NAME = "getOp"; - /** Get operations. 
*/ public static enum Op implements HttpOpParam.Op { OPEN(HttpURLConnection.HTTP_OK), diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java index 644c4032dbc..422ec0f2f2f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/HttpOpParam.java @@ -20,6 +20,9 @@ package org.apache.hadoop.hdfs.web.resources; /** Http operation parameter. */ public abstract class HttpOpParam<E extends Enum<E> & HttpOpParam.Op> extends EnumParam<E> { + /** Parameter name. */ + public static final String NAME = "op"; + /** Default parameter value. */ public static final String DEFAULT = NULL; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java index d43da073280..a0e38a97e7d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/ModificationTimeParam.java @@ -20,7 +20,7 @@ package org.apache.hadoop.hdfs.web.resources; /** Modification time parameter. */ public class ModificationTimeParam extends LongParam { /** Parameter name. */ - public static final String NAME = "modificationTime"; + public static final String NAME = "modificationtime"; /** Default parameter value.
*/ public static final String DEFAULT = "-1"; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java index 116d6af8b36..b553ecc6701 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PostOpParam.java @@ -21,9 +21,6 @@ import java.net.HttpURLConnection; /** Http POST operation parameter. */ public class PostOpParam extends HttpOpParam { - /** Parameter name. */ - public static final String NAME = "postOp"; - /** Post operations. */ public static enum Op implements HttpOpParam.Op { APPEND(HttpURLConnection.HTTP_OK), diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java index 00703fefbc7..dcfaa6f06cd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/PutOpParam.java @@ -21,9 +21,6 @@ import java.net.HttpURLConnection; /** Http POST operation parameter. */ public class PutOpParam extends HttpOpParam { - /** Parameter name. */ - public static final String NAME = "putOp"; - /** Put operations. 
*/ public static enum Op implements HttpOpParam.Op { CREATE(true, HttpURLConnection.HTTP_CREATED), diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java index ec66a51c788..d7c157d5086 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/RenameOptionSetParam.java @@ -22,7 +22,7 @@ import org.apache.hadoop.fs.Options; /** Rename option set parameter. */ public class RenameOptionSetParam extends EnumSetParam { /** Parameter name. */ - public static final String NAME = "renameOptions"; + public static final String NAME = "renameoptions"; /** Default parameter value. */ public static final String DEFAULT = ""; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java index 47ae417430f..abe07fc51f1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java @@ -18,8 +18,12 @@ package org.apache.hadoop.hdfs.web; +import java.io.BufferedReader; import java.io.IOException; +import java.io.InputStreamReader; +import java.net.HttpURLConnection; import java.net.URI; +import java.net.URL; import java.security.PrivilegedExceptionAction; import org.apache.hadoop.conf.Configuration; @@ -30,6 +34,7 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.MiniDFSCluster; +import 
org.apache.hadoop.hdfs.web.resources.PutOpParam; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; @@ -127,4 +132,30 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest { assertEquals(expected[i].toString(), computed[i].toString()); } } + + public void testCaseInsensitive() throws IOException { + final Path p = new Path("/test/testCaseInsensitive"); + final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs; + final PutOpParam.Op op = PutOpParam.Op.MKDIRS; + + //replace query with mixed case letters + final URL url = webhdfs.toUrl(op, p); + WebHdfsFileSystem.LOG.info("url = " + url); + final URL replaced = new URL(url.toString().replace(op.toQueryString(), + "Op=mkDIrs")); + WebHdfsFileSystem.LOG.info("replaced = " + replaced); + + //connect with the replaced URL. + final HttpURLConnection conn = (HttpURLConnection)replaced.openConnection(); + conn.setRequestMethod(op.getType().toString()); + conn.connect(); + final BufferedReader in = new BufferedReader(new InputStreamReader( + conn.getInputStream())); + for(String line; (line = in.readLine()) != null; ) { + WebHdfsFileSystem.LOG.info("> " + line); + } + + //check if the command succeeds. + assertTrue(fs.getFileStatus(p).isDirectory()); + } }