svn merge -c 1348287 from trunk for HDFS-3490. DatanodeWebHdfsMethods throws NullPointerException if NamenodeRpcAddressParam is not set.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1348288 13f79535-47bb-0310-9956-ffa450edef68
Tsz-wo Sze 2012-06-09 00:55:47 +00:00
parent 4b531693d3
commit 1f564d1135
6 changed files with 42 additions and 4 deletions
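
For context (an editor's sketch, not part of the commit): a WebHDFS write is a two-step exchange. The client sends the operation to the namenode, which answers with a 307 redirect to a datanode; the redirect URL carries a namenoderpcaddress query parameter that the datanode parses back into an InetSocketAddress. A client that contacts the datanode directly without that parameter left the parsed address null, and the old code dereferenced it. A minimal standalone sketch of the before/after behavior, with hypothetical names:

```java
import java.net.InetSocketAddress;

public class Hdfs3490Sketch {
  public static void main(String[] args) {
    // The namenoderpcaddress query parameter was never supplied,
    // so the parsed address is null.
    final InetSocketAddress nnRpcAddr = null;

    // Before HDFS-3490: the first dereference blew up with a bare NPE.
    try {
      nnRpcAddr.getHostName();
    } catch (NullPointerException e) {
      System.out.println("old behavior: " + e);
    }

    // After HDFS-3490: an explicit guard raises a descriptive error instead,
    // mirroring the check added to DatanodeWebHdfsMethods below.
    if (nnRpcAddr == null) {
      System.out.println("new behavior: IllegalArgumentException: "
          + "namenoderpcaddress is not specified.");
    }
  }
}
```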

hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt

@@ -180,6 +180,9 @@ Release 2.0.1-alpha - UNRELEASED
     HDFS-3243. TestParallelRead timing out on jenkins. (Henry Robinson via todd)
 
+    HDFS-3490. DatanodeWebHdfsMethods throws NullPointerException if
+    NamenodeRpcAddressParam is not set.  (szetszwo)
+
   BREAKDOWN OF HDFS-3042 SUBTASKS
 
     HDFS-2185. HDFS portion of ZK-based FailoverController (todd)

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/web/resources/DatanodeWebHdfsMethods.java

@@ -98,6 +98,10 @@ public class DatanodeWebHdfsMethods {
       LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
           + ", ugi=" + ugi + Param.toSortedString(", ", parameters));
     }
 
+    if (nnRpcAddr == null) {
+      throw new IllegalArgumentException(NamenodeRpcAddressParam.NAME
+          + " is not specified.");
+    }
     //clear content type
     response.setContentType(null);
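
The guard above turns the former NPE (surfacing as an opaque server error) into an IllegalArgumentException, which WebHDFS's JAX-RS exception mapping reports as a client error; the new test at the bottom of this commit asserts exactly that 400. As an illustration only (this is not Hadoop's actual ExceptionHandler), a mapper of that shape might look like:

```java
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;

/** Illustrative only: report bad request parameters as HTTP 400. */
@Provider
public class IllegalArgumentMapper
    implements ExceptionMapper<IllegalArgumentException> {
  @Override
  public Response toResponse(final IllegalArgumentException e) {
    // A missing or malformed query parameter is the client's fault,
    // so answer 400 Bad Request rather than 500 Internal Server Error.
    return Response.status(Response.Status.BAD_REQUEST)
        .type(MediaType.APPLICATION_JSON)
        .entity("{\"RemoteException\":{\"message\":\""
            + e.getMessage() + "\"}}")
        .build();
  }
}
```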

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java

@@ -123,7 +123,7 @@ public class NamenodeWebHdfsMethods {
       final DelegationParam delegation,
       final UserParam username, final DoAsParam doAsUser,
       final UriFsPathParam path, final HttpOpParam<?> op,
-      final Param<?, ?>... parameters) throws IOException {
+      final Param<?, ?>... parameters) {
     if (LOG.isTraceEnabled()) {
       LOG.trace("HTTP " + op.getValue().getType() + ": " + op + ", " + path
           + ", ugi=" + ugi + ", " + username + ", " + doAsUser
@@ -532,7 +532,7 @@ public class NamenodeWebHdfsMethods {
       final RenewerParam renewer,
       @QueryParam(BufferSizeParam.NAME) @DefaultValue(BufferSizeParam.DEFAULT)
           final BufferSizeParam bufferSize
-      ) throws IOException, URISyntaxException, InterruptedException {
+      ) throws IOException, InterruptedException {
     return get(ugi, delegation, username, doAsUser, ROOT, op,
         offset, length, renewer, bufferSize);
   }

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/InetSocketAddressParam.java

@@ -44,6 +44,10 @@ abstract class InetSocketAddressParam
     @Override
     InetSocketAddress parse(final String str) {
+      if (str == null) {
+        throw new IllegalArgumentException("The input string is null: expect "
+            + getDomain());
+      }
       final int i = str.indexOf(':');
       if (i < 0) {
         throw new IllegalArgumentException("Failed to parse \"" + str
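
Taken on its own, the added null check makes the host:port parser fail fast. A self-contained approximation (simplified from the class above; the real parse also validates the host and port) behaves like this:

```java
import java.net.InetSocketAddress;

public class InetSocketAddressParseDemo {
  // Simplified stand-in for the parse method patched above.
  static InetSocketAddress parse(final String str) {
    if (str == null) {
      // The new guard: fail fast with a descriptive message.
      throw new IllegalArgumentException(
          "The input string is null: expect <HOST:PORT>");
    }
    final int i = str.indexOf(':');
    if (i < 0) {
      throw new IllegalArgumentException(
          "Failed to parse \"" + str + "\": a ':' is expected.");
    }
    return InetSocketAddress.createUnresolved(
        str.substring(0, i), Integer.parseInt(str.substring(i + 1)));
  }

  public static void main(String[] args) {
    System.out.println(parse("localhost:8020")); // localhost:8020
    parse(null); // IllegalArgumentException now; NullPointerException before
  }
}
```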

hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/resources/LongParam.java

@@ -59,7 +59,7 @@ abstract class LongParam extends Param<Long, LongParam.Domain> {
     @Override
     public String getDomain() {
-      return "<" + NULL + " | short in radix " + radix + ">";
+      return "<" + NULL + " | long in radix " + radix + ">";
     }
 
     @Override
@@ -72,7 +72,7 @@ abstract class LongParam extends Param<Long, LongParam.Domain> {
       }
     }
 
-    /** Convert a Short to a String. */
+    /** Convert a Long to a String. */
     String toString(final Long n) {
       return n == null? NULL: Long.toString(n, radix);
     }

hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java

@@ -44,6 +44,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.web.resources.DoAsParam;
 import org.apache.hadoop.hdfs.web.resources.GetOpParam;
 import org.apache.hadoop.hdfs.web.resources.HttpOpParam;
+import org.apache.hadoop.hdfs.web.resources.NamenodeRpcAddressParam;
 import org.apache.hadoop.hdfs.web.resources.PutOpParam;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;
@@ -351,5 +352,31 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest {
     {//test append.
       AppendTestUtil.testAppend(fs, new Path(dir, "append"));
     }
+
+    {//test NamenodeRpcAddressParam not set.
+      final HttpOpParam.Op op = PutOpParam.Op.CREATE;
+      final URL url = webhdfs.toUrl(op, dir);
+      HttpURLConnection conn = (HttpURLConnection) url.openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.setDoOutput(false);
+      conn.setInstanceFollowRedirects(false);
+      conn.connect();
+      final String redirect = conn.getHeaderField("Location");
+      conn.disconnect();
+
+      //remove NamenodeRpcAddressParam
+      WebHdfsFileSystem.LOG.info("redirect = " + redirect);
+      final int i = redirect.indexOf(NamenodeRpcAddressParam.NAME);
+      final int j = redirect.indexOf("&", i);
+      String modified = redirect.substring(0, i - 1) + redirect.substring(j);
+      WebHdfsFileSystem.LOG.info("modified = " + modified);
+
+      //connect to datanode
+      conn = (HttpURLConnection)new URL(modified).openConnection();
+      conn.setRequestMethod(op.getType().toString());
+      conn.setDoOutput(op.getDoOutput());
+      conn.connect();
+      assertEquals(HttpServletResponse.SC_BAD_REQUEST, conn.getResponseCode());
+    }
   }
 }