From 5991ed9cbd18520040159508ef8bd02b7b3bf5e5 Mon Sep 17 00:00:00 2001
From: Tsz-wo Sze
Date: Sat, 9 Jun 2012 00:54:10 +0000
Subject: [PATCH] HDFS-3490. DatanodeWebHdfsMethods throws NullPointerException if NamenodeRpcAddressParam is not set.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1348287 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt       |  3 +++
 .../web/resources/DatanodeWebHdfsMethods.java     |  4 +++
 .../web/resources/NamenodeWebHdfsMethods.java     |  4 +--
 .../web/resources/InetSocketAddressParam.java     |  4 +++
 .../hadoop/hdfs/web/resources/LongParam.java      |  4 +--
 .../web/TestWebHdfsFileSystemContract.java        | 27 +++++++++++++++++++
 6 files changed, 42 insertions(+), 4 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 787e646e975..0277f829a04 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -305,6 +305,9 @@ Branch-2 ( Unreleased changes )
     HDFS-3243. TestParallelRead timing out on jenkins. (Henry Robinson via
     todd)
 
+    HDFS-3490. DatanodeWebHdfsMethods throws NullPointerException if
+    NamenodeRpcAddressParam is not set.  (szetszwo)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
     HDFS-2185. HDFS portion of ZK-based FailoverController (todd)
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
index 32b5c60cc6a..eb4afe75f85 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java
@@ -98,6 +98,10 @@ public class DatanodeWebHdfsMethods {
       LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
           + ", ugi=" + ugi + Param.toSortedString(", ", parameters));
     }
+    if (nnRpcAddr == null) {
+      throw new IllegalArgumentException(NamenodeRpcAddressParam.NAME
+          + " is not specified.");
+    }
 
     //clear content type
     response.setContentType(null);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index 0127e959be8..de8f256705b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -123,7 +123,7 @@ public class NamenodeWebHdfsMethods {
       final DelegationParam delegation, final UserParam username,
       final DoAsParam doAsUser, final UriFsPathParam path,
       final HttpOpParam op,
-      final Param... parameters) throws IOException {
+      final Param... parameters) {
     if (LOG.isTraceEnabled()) {
       LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
           + ", ugi=" + ugi + ", " + username + ", " + doAsUser
@@ -532,7 +532,7 @@ public class NamenodeWebHdfsMethods {
           final RenewerParam renewer,
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
-      ) throws IOException, URISyntaxException, InterruptedException {
+      ) throws IOException, InterruptedException {
     return get(ugi, delegation, username, doAsUser, ROOT, op,
         offset, length, renewer, bufferSize);
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/InetSocketAddressParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/InetSocketAddressParam.java
index dc21f684e7e..9879ba3032c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/InetSocketAddressParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/InetSocketAddressParam.java
@@ -44,6 +44,10 @@ abstract class InetSocketAddressParam
 
     @Override
     InetSocketAddress parse(final String str) {
+      if (str == null) {
+        throw new IllegalArgumentException("The input string is null: expect "
+            + getDomain());
+      }
       final int i = str.indexOf(':');
       if (i < 0) {
         throw new IllegalArgumentException("Failed to parse \"" + str
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java
index 023402cfe01..6f102e1c9f3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java
@@ -59,7 +59,7 @@ abstract class LongParam extends Param {
 
     @Override
     public String getDomain() {
-      return "<" + NULL + " | short in radix " + radix + ">";
+      return "<" + NULL + " | long in radix " + radix + ">";
     }
 
     @Override
@@ -72,7 +72,7 @@ abstract class LongParam extends Param {
       }
     }
 
-    /** Convert a Short to a String. */
+    /** Convert a Long to a String. */
     String toString(final Long n) {
       return n == null? NULL: Long.toString(n, radix);
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
index 04ffd10b35d..61734180cfd 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java
@@ -44,6 +44,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.NamenodeRpcAddressParam;
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -351,5 +352,31 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
     {//test append.
       AppendTestUtil.testAppend(fs, new Path(dir, "append"));
     }
+
+    {//test NamenodeRpcAddressParam not set.
+      final HttpOpParam.Op op = PutOpParam.Op.CREATE;
+      final URL url = webhdfs.toUrl(op, dir);
+      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.setDoOutput(false);
+      conn.setInstanceFollowRedirects(false);
+      conn.connect();
+      final String redirect = conn.getHeaderField("Location");
+      conn.disconnect();
+
+      //remove NamenodeRpcAddressParam
+      WebHdfsFileSystem.LOG.info("redirect = " + redirect);
+      final int i = redirect.indexOf(NamenodeRpcAddressParam.NAME);
+      final int j = redirect.indexOf("&", i);
+      String modified = redirect.substring(0, i - 1) + redirect.substring(j);
+      WebHdfsFileSystem.LOG.info("modified = " + modified);
+
+      //connect to datanode
+      conn = (HttpURLConnection)new URL(modified).openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.setDoOutput(op.getDoOutput());
+      conn.connect();
+      assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
+    }
   }
 }
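Note (illustrative sketch, not part of the patch above): the client-visible effect of the new guard in DatanodeWebHdfsMethods can be reproduced with a plain HTTP client. The snippet below assumes a hypothetical datanode at datanode.example.com:50075, a placeholder path /tmp/f, and a placeholder user; it sends a WebHDFS CREATE directly to the datanode while omitting the namenoderpcaddress query parameter (NamenodeRpcAddressParam.NAME) that the namenode's redirect normally appends. Before this change such a request failed with a server error caused by the NullPointerException; with the change it is rejected with 400 Bad Request, which is what the added test asserts.

// Hypothetical standalone client; host, port, path and user name are placeholders.
import java.net.HttpURLConnection;
import java.net.URL;

public class MissingNnRpcAddressExample {
  public static void main(String[] args) throws Exception {
    // A WebHDFS CREATE aimed directly at a datanode, deliberately omitting the
    // namenoderpcaddress parameter that the namenode redirect would normally add.
    final URL url = new URL("http://datanode.example.com:50075/webhdfs/v1/tmp/f"
        + "?op=CREATE&user.name=alice");
    final HttpURLConnection conn = (HttpURLConnection) url.openConnection();
    conn.setRequestMethod("PUT");
    conn.setDoOutput(true);
    conn.setInstanceFollowRedirects(false);
    conn.connect();
    // Expected with this patch applied: 400 Bad Request (from the new
    // IllegalArgumentException) rather than a NullPointerException-driven error.
    System.out.println(conn.getResponseCode() + " " + conn.getResponseMessage());
    conn.disconnect();
  }
}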