From 8855a3e0ee65daff9b3425bf81af9f5b0668b57d Mon Sep 17 00:00:00 2001
From: Jing Zhao
Date: Wed, 18 Sep 2013 06:26:57 +0000
Subject: [PATCH] HDFS-5212. Merge change r1524298 from trunk.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1524302 13f79535-47bb-0310-9956-ffa450edef68
---
 .../apache/hadoop/mount/MountResponse.java | 7 +--
 .../nfs/nfs3/response/ACCESS3Response.java | 5 +-
 .../nfs/nfs3/response/COMMIT3Response.java | 5 +-
 .../nfs/nfs3/response/CREATE3Response.java | 5 +-
 .../nfs/nfs3/response/FSINFO3Response.java | 5 +-
 .../nfs/nfs3/response/FSSTAT3Response.java | 5 +-
 .../nfs/nfs3/response/GETATTR3Response.java | 5 +-
 .../nfs/nfs3/response/LOOKUP3Response.java | 5 +-
 .../nfs/nfs3/response/MKDIR3Response.java | 5 +-
 .../nfs/nfs3/response/NFS3Response.java | 15 ++++--
 .../nfs/nfs3/response/PATHCONF3Response.java | 5 +-
 .../nfs/nfs3/response/READ3Response.java | 5 +-
 .../nfs/nfs3/response/READDIR3Response.java | 5 +-
 .../nfs3/response/READDIRPLUS3Response.java | 5 +-
 .../nfs/nfs3/response/READLINK3Response.java | 5 +-
 .../nfs/nfs3/response/REMOVE3Response.java | 5 +-
 .../nfs/nfs3/response/RENAME3Response.java | 5 +-
 .../nfs/nfs3/response/RMDIR3Response.java | 5 +-
 .../nfs/nfs3/response/SETATTR3Response.java | 5 +-
 .../nfs/nfs3/response/SYMLINK3Response.java | 5 +-
 .../nfs/nfs3/response/VoidResponse.java | 37 --------------
 .../nfs/nfs3/response/WRITE3Response.java | 5 +-
 .../hadoop/oncrpc/RpcAcceptedReply.java | 45 ++++++++---------
 .../org/apache/hadoop/oncrpc/RpcCall.java | 48 +++++++++++--------
 .../apache/hadoop/oncrpc/RpcDeniedReply.java | 28 +++++------
 .../org/apache/hadoop/oncrpc/RpcMessage.java | 2 +
 .../org/apache/hadoop/oncrpc/RpcProgram.java | 9 +++-
 .../org/apache/hadoop/oncrpc/RpcReply.java | 28 +++++++----
 .../hadoop/oncrpc/security/Credentials.java | 16 +++++++
 .../hadoop/oncrpc/security/Verifier.java | 25 ++++++++--
 .../apache/hadoop/portmap/PortmapRequest.java | 15 ++----
 .../hadoop/portmap/PortmapResponse.java | 9 ++--
 .../hadoop/portmap/RpcProgramPortmap.java | 6 ++-
 .../hadoop/oncrpc/TestFrameDecoder.java | 8 +++-
 .../hadoop/oncrpc/TestRpcAcceptedReply.java | 2 +-
 .../hadoop/oncrpc/TestRpcDeniedReply.java | 7 ++-
 .../apache/hadoop/oncrpc/TestRpcMessage.java | 5 +-
 .../apache/hadoop/oncrpc/TestRpcReply.java | 9 +++-
 .../hdfs/nfs/mount/RpcProgramMountd.java | 14 ++++--
 .../hadoop/hdfs/nfs/nfs3/OpenFileCtx.java | 27 +++++++----
 .../hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java | 23 +++++----
 .../hadoop/hdfs/nfs/nfs3/WriteManager.java | 13 +++--
 .../hadoop/hdfs/nfs/TestOutOfOrderWrite.java | 25 ++++------
 .../hadoop/hdfs/nfs/TestPortmapRegister.java | 9 ++--
 .../apache/hadoop/hdfs/nfs/TestUdpServer.java | 5 +-
 hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt | 3 ++
 46 files changed, 304 insertions(+), 231 deletions(-)
 delete mode 100644 hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/VoidResponse.java

diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
index dd837f54121..88b023c6819 100644
--- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
+++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/mount/MountResponse.java
@@ -22,6 +22,7 @@
 import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
+import
org.apache.hadoop.oncrpc.security.VerifierNone; import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor; /** @@ -37,7 +38,7 @@ private MountResponse() { /** Response for RPC call {@link MountInterface.MNTPROC#MNT} */ public static XDR writeMNTResponse(int status, XDR xdr, int xid, byte[] handle) { - RpcAcceptedReply.voidReply(xdr, xid); + RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr); xdr.writeInt(status); if (status == MNT_OK) { xdr.writeVariableOpaque(handle); @@ -50,7 +51,7 @@ public static XDR writeMNTResponse(int status, XDR xdr, int xid, /** Response for RPC call {@link MountInterface.MNTPROC#DUMP} */ public static XDR writeMountList(XDR xdr, int xid, List mounts) { - RpcAcceptedReply.voidReply(xdr, xid); + RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr); for (MountEntry mountEntry : mounts) { xdr.writeBoolean(true); // Value follows yes xdr.writeString(mountEntry.host()); @@ -65,7 +66,7 @@ public static XDR writeExportList(XDR xdr, int xid, List exports, List hostMatcher) { assert (exports.size() == hostMatcher.size()); - RpcAcceptedReply.voidReply(xdr, xid); + RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr); for (int i = 0; i < exports.size(); i++) { xdr.writeBoolean(true); // Value follows - yes xdr.writeString(exports.get(i)); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java index 88554fc26d6..8ba07aa5b37 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/ACCESS3Response.java @@ -20,6 +20,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * ACCESS3 Response @@ -43,8 +44,8 @@ public ACCESS3Response(int status, Nfs3FileAttributes postOpAttr, int access) { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); out.writeBoolean(true); postOpAttr.serialize(out); if (this.getStatus() == Nfs3Status.NFS3_OK) { diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java index 86caf46608f..fd90b187d5f 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/COMMIT3Response.java @@ -20,6 +20,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3Constant; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * COMMIT3 Response @@ -47,8 +48,8 @@ public long getVerf() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); fileWcc.serialize(out); if (getStatus() == Nfs3Status.NFS3_OK) { out.writeLongAsHyper(verf); diff --git 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java index 7edb115ce34..593cd82f2e9 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/CREATE3Response.java @@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * CREATE3 Response @@ -55,8 +56,8 @@ public WccData getDirWcc() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); if (getStatus() == Nfs3Status.NFS3_OK) { out.writeBoolean(true); // Handle follows objHandle.serialize(out); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java index 889b57a6912..ed301ac167c 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSINFO3Response.java @@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * FSINFO3 Response @@ -109,8 +110,8 @@ public FSINFO3Response(int status, Nfs3FileAttributes postOpAttr, int rtmax, } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); out.writeBoolean(true); postOpAttr.serialize(out); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java index e11556eae28..0dd3f73b651 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/FSSTAT3Response.java @@ -20,6 +20,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * FSSTAT3 Response @@ -90,8 +91,8 @@ public FSSTAT3Response(int status, Nfs3FileAttributes postOpAttr, } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); out.writeBoolean(true); if (postOpAttr == null) { postOpAttr = new Nfs3FileAttributes(); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java index e0de49ca5ea..b9bb3f011d1 100644 --- 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/GETATTR3Response.java @@ -20,6 +20,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * GETATTR3 Response @@ -40,8 +41,8 @@ public void setPostOpAttr(Nfs3FileAttributes postOpAttr) { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); if (getStatus() == Nfs3Status.NFS3_OK) { postOpAttr.serialize(out); } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java index 27d82c0ef87..5d33f985272 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/LOOKUP3Response.java @@ -23,6 +23,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * LOOKUP3 Response @@ -61,8 +62,8 @@ public LOOKUP3Response(XDR xdr) throws IOException { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); if (this.status == Nfs3Status.NFS3_OK) { fileHandle.serialize(out); out.writeBoolean(true); // Attribute follows diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java index fb0cdc15fb3..0e0980a53a8 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/MKDIR3Response.java @@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * MKDIR3 Response @@ -55,8 +56,8 @@ public WccData getDirWcc() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); if (getStatus() == Nfs3Status.NFS3_OK) { out.writeBoolean(true); // Handle follows objFileHandle.serialize(out); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java index 6d8b640c424..e30af82d964 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/NFS3Response.java @@ -19,11 +19,13 @@ import org.apache.hadoop.oncrpc.RpcAcceptedReply; import org.apache.hadoop.oncrpc.XDR; +import 
org.apache.hadoop.oncrpc.security.Verifier; /** - * Abstract class for a NFSv3 response + * Base class for a NFSv3 response. This class and its subclasses contain + * the response from NFSv3 handlers. */ -abstract public class NFS3Response { +public class NFS3Response { protected int status; public NFS3Response(int status) { @@ -38,8 +40,13 @@ public void setStatus(int status) { this.status = status; } - public XDR send(XDR out, int xid) { - RpcAcceptedReply.voidReply(out, xid); + /** + * Write the response, along with the rpc header (including verifier), to the + * XDR. + */ + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + RpcAcceptedReply reply = RpcAcceptedReply.getAcceptInstance(xid, verifier); + reply.write(out); out.writeInt(this.getStatus()); return out; } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java index 1b8f004da5c..e4578a06c57 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/PATHCONF3Response.java @@ -20,6 +20,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * PATHCONF3 Response @@ -77,8 +78,8 @@ public PATHCONF3Response(int status, Nfs3FileAttributes postOpAttr, } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); out.writeBoolean(true); postOpAttr.serialize(out); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java index 2bbee947f22..2524ca0c6d8 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READ3Response.java @@ -22,6 +22,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * READ3 Response @@ -62,8 +63,8 @@ public ByteBuffer getData() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); out.writeBoolean(true); // Attribute follows postOpAttr.serialize(out); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java index 9f8d6760b5b..0f0138abc1a 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIR3Response.java @@ -24,6 +24,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import 
org.apache.hadoop.oncrpc.security.Verifier; /** * READDIR3 Response @@ -96,8 +97,8 @@ public DirList3 getDirList() { } @Override - public XDR send(XDR xdr, int xid) { - super.send(xdr, xid); + public XDR writeHeaderAndResponse(XDR xdr, int xid, Verifier verifier) { + super.writeHeaderAndResponse(xdr, xid, verifier); xdr.writeBoolean(true); // Attributes follow postOpDirAttr.serialize(xdr); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java index 6b41cb27f7a..68647a398c8 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READDIRPLUS3Response.java @@ -25,6 +25,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * READDIRPLUS3 Response @@ -92,8 +93,8 @@ public READDIRPLUS3Response(int status, Nfs3FileAttributes postOpDirAttr, } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); out.writeBoolean(true); // attributes follow if (postOpDirAttr == null) { postOpDirAttr = new Nfs3FileAttributes(); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java index 758895c588f..fe5429640b2 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/READLINK3Response.java @@ -20,6 +20,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * READLINK3 Response @@ -41,8 +42,8 @@ public READLINK3Response(int status, Nfs3FileAttributes postOpAttr, } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); out.writeBoolean(true); // Attribute follows postOpSymlinkAttr.serialize(out); if (getStatus() == Nfs3Status.NFS3_OK) { diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java index 955f0edf9a8..55b880b2cf7 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/REMOVE3Response.java @@ -18,6 +18,7 @@ package org.apache.hadoop.nfs.nfs3.response; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * REMOVE3 Response @@ -35,8 +36,8 @@ public REMOVE3Response(int status, WccData dirWcc) { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + 
super.writeHeaderAndResponse(out, xid, verifier); if (dirWcc == null) { dirWcc = new WccData(null, null); } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java index 35a6409e926..4e0f4c03bba 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RENAME3Response.java @@ -18,6 +18,7 @@ package org.apache.hadoop.nfs.nfs3.response; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * RENAME3 Response @@ -45,8 +46,8 @@ public WccData getToDirWcc() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); fromDirWcc.serialize(out); toDirWcc.serialize(out); return out; diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java index 92d7afd1089..e7b066481fe 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/RMDIR3Response.java @@ -18,6 +18,7 @@ package org.apache.hadoop.nfs.nfs3.response; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * RMDIR3 Response @@ -39,8 +40,8 @@ public WccData getDirWcc() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); dirWcc.serialize(out); return out; } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java index 27cebac47b4..eda06d437fa 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SETATTR3Response.java @@ -18,6 +18,7 @@ package org.apache.hadoop.nfs.nfs3.response; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * SETATTR3 Response @@ -39,8 +40,8 @@ public WccData getWccData() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); wccData.serialize(out); return out; } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java index a1d245c8eac..d8e3441e554 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/SYMLINK3Response.java @@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes; import 
org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * SYMLINK3 Response @@ -55,8 +56,8 @@ public WccData getDirWcc() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); if (this.getStatus() == Nfs3Status.NFS3_OK) { out.writeBoolean(true); objFileHandle.serialize(out); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/VoidResponse.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/VoidResponse.java deleted file mode 100644 index 3e039027855..00000000000 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/VoidResponse.java +++ /dev/null @@ -1,37 +0,0 @@ -/** - * Licensed to the Apache Software Foundation (ASF) under one - * or more contributor license agreements. See the NOTICE file - * distributed with this work for additional information - * regarding copyright ownership. The ASF licenses this file - * to you under the Apache License, Version 2.0 (the - * "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - */ -package org.apache.hadoop.nfs.nfs3.response; - -import org.apache.hadoop.oncrpc.RpcAcceptedReply; -import org.apache.hadoop.oncrpc.XDR; - -/** - * A void NFSv3 response - */ -public class VoidResponse extends NFS3Response { - - public VoidResponse(int status) { - super(status); - } - - @Override - public XDR send(XDR out, int xid) { - RpcAcceptedReply.voidReply(out, xid); - return out; - } -} diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java index d1b8f00ccfd..f33c4de2c58 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/nfs/nfs3/response/WRITE3Response.java @@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3Status; import org.apache.hadoop.nfs.nfs3.Nfs3Constant.WriteStableHow; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.Verifier; /** * WRITE3 Response @@ -58,8 +59,8 @@ public long getVerifer() { } @Override - public XDR send(XDR out, int xid) { - super.send(out, xid); + public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) { + super.writeHeaderAndResponse(out, xid, verifier); fileWcc.serialize(out); if (getStatus() == Nfs3Status.NFS3_OK) { out.writeInt(count); diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java index c909c283d7d..9b6b57b9979 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java +++ 
b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcAcceptedReply.java @@ -18,8 +18,6 @@ package org.apache.hadoop.oncrpc; import org.apache.hadoop.oncrpc.security.Verifier; -import org.apache.hadoop.oncrpc.security.RpcAuthInfo; -import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor; /** * Represents RPC message MSG_ACCEPTED reply body. See RFC 1831 for details. @@ -43,43 +41,42 @@ public int getValue() { return ordinal(); } }; + + public static RpcAcceptedReply getAcceptInstance(int xid, + Verifier verifier) { + return getInstance(xid, AcceptState.SUCCESS, verifier); + } + + public static RpcAcceptedReply getInstance(int xid, AcceptState state, + Verifier verifier) { + return new RpcAcceptedReply(xid, ReplyState.MSG_ACCEPTED, verifier, + state); + } - private final RpcAuthInfo verifier; private final AcceptState acceptState; - RpcAcceptedReply(int xid, RpcMessage.Type messageType, ReplyState state, - RpcAuthInfo verifier, AcceptState acceptState) { - super(xid, messageType, state); - this.verifier = verifier; + RpcAcceptedReply(int xid, ReplyState state, Verifier verifier, + AcceptState acceptState) { + super(xid, state, verifier); this.acceptState = acceptState; } - public static RpcAcceptedReply read(int xid, RpcMessage.Type messageType, - ReplyState replyState, XDR xdr) { + public static RpcAcceptedReply read(int xid, ReplyState replyState, XDR xdr) { Verifier verifier = Verifier.readFlavorAndVerifier(xdr); AcceptState acceptState = AcceptState.fromValue(xdr.readInt()); - return new RpcAcceptedReply(xid, messageType, replyState, verifier, - acceptState); - } - - public RpcAuthInfo getVerifier() { - return verifier; + return new RpcAcceptedReply(xid, replyState, verifier, acceptState); } public AcceptState getAcceptState() { return acceptState; } - public static XDR voidReply(XDR xdr, int xid) { - return voidReply(xdr, xid, AcceptState.SUCCESS); - } - - public static XDR voidReply(XDR xdr, int xid, AcceptState acceptState) { + @Override + public XDR write(XDR xdr) { xdr.writeInt(xid); - xdr.writeInt(RpcMessage.Type.RPC_REPLY.getValue()); - xdr.writeInt(ReplyState.MSG_ACCEPTED.getValue()); - xdr.writeInt(AuthFlavor.AUTH_NONE.getValue()); - xdr.writeVariableOpaque(new byte[0]); + xdr.writeInt(messageType.getValue()); + xdr.writeInt(replyState.getValue()); + Verifier.writeFlavorAndVerifier(verifier, xdr); xdr.writeInt(acceptState.getValue()); return xdr; } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java index 4c872da8557..aa4b948d582 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcCall.java @@ -28,11 +28,25 @@ public class RpcCall extends RpcMessage { public static final int RPC_VERSION = 2; private static final Log LOG = LogFactory.getLog(RpcCall.class); + + public static RpcCall read(XDR xdr) { + return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()), + xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(), + Credentials.readFlavorAndCredentials(xdr), + Verifier.readFlavorAndVerifier(xdr)); + } + + public static RpcCall getInstance(int xid, int program, int version, + int procedure, Credentials cred, Verifier verifier) { + return new RpcCall(xid, RpcMessage.Type.RPC_CALL, 2, program, version, + procedure, cred, verifier); + } + private final int rpcVersion; private 
final int program; private final int version; private final int procedure; - private final Credentials credential; + private final Credentials credentials; private final Verifier verifier; protected RpcCall(int xid, RpcMessage.Type messageType, int rpcVersion, @@ -43,7 +57,7 @@ protected RpcCall(int xid, RpcMessage.Type messageType, int rpcVersion, this.program = program; this.version = version; this.procedure = procedure; - this.credential = credential; + this.credentials = credential; this.verifier = verifier; if (LOG.isTraceEnabled()) { LOG.trace(this); @@ -83,28 +97,24 @@ public int getProcedure() { } public Credentials getCredential() { - return credential; + return credentials; } public Verifier getVerifier() { return verifier; } - public static RpcCall read(XDR xdr) { - return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()), - xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(), - Credentials.readFlavorAndCredentials(xdr), - Verifier.readFlavorAndVerifier(xdr)); - } - - public static void write(XDR out, int xid, int program, int progVersion, - int procedure) { - out.writeInt(xid); - out.writeInt(RpcMessage.Type.RPC_CALL.getValue()); - out.writeInt(2); - out.writeInt(program); - out.writeInt(progVersion); - out.writeInt(procedure); + @Override + public XDR write(XDR xdr) { + xdr.writeInt(xid); + xdr.writeInt(RpcMessage.Type.RPC_CALL.getValue()); + xdr.writeInt(2); + xdr.writeInt(program); + xdr.writeInt(version); + xdr.writeInt(procedure); + Credentials.writeFlavorAndCredentials(credentials, xdr); + Verifier.writeFlavorAndVerifier(verifier, xdr); + return xdr; } @Override @@ -112,6 +122,6 @@ public String toString() { return String.format("Xid:%d, messageType:%s, rpcVersion:%d, program:%d," + " version:%d, procedure:%d, credential:%s, verifier:%s", xid, messageType, rpcVersion, program, version, procedure, - credential.toString(), verifier.toString()); + credentials.toString(), verifier.toString()); } } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java index 8a9af096d1f..2c4a0946c0e 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcDeniedReply.java @@ -17,7 +17,7 @@ */ package org.apache.hadoop.oncrpc; -import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor; +import org.apache.hadoop.oncrpc.security.Verifier; /** * Represents RPC message MSG_DENIED reply body. See RFC 1831 for details. 
@@ -40,16 +40,16 @@ static RejectState fromValue(int value) { private final RejectState rejectState; - RpcDeniedReply(int xid, RpcMessage.Type messageType, ReplyState replyState, - RejectState rejectState) { - super(xid, messageType, replyState); + public RpcDeniedReply(int xid, ReplyState replyState, + RejectState rejectState, Verifier verifier) { + super(xid, replyState, verifier); this.rejectState = rejectState; } - public static RpcDeniedReply read(int xid, RpcMessage.Type messageType, - ReplyState replyState, XDR xdr) { + public static RpcDeniedReply read(int xid, ReplyState replyState, XDR xdr) { + Verifier verifier = Verifier.readFlavorAndVerifier(xdr); RejectState rejectState = RejectState.fromValue(xdr.readInt()); - return new RpcDeniedReply(xid, messageType, replyState, rejectState); + return new RpcDeniedReply(xid, replyState, rejectState, verifier); } public RejectState getRejectState() { @@ -59,17 +59,17 @@ public RejectState getRejectState() { @Override public String toString() { return new StringBuffer().append("xid:").append(xid) - .append(",messageType:").append(messageType).append("rejectState:") + .append(",messageType:").append(messageType).append("verifier_flavor:") + .append(verifier.getFlavor()).append("rejectState:") .append(rejectState).toString(); } - public static XDR voidReply(XDR xdr, int xid, ReplyState msgAccepted, - RejectState rejectState) { + @Override + public XDR write(XDR xdr) { xdr.writeInt(xid); - xdr.writeInt(RpcMessage.Type.RPC_REPLY.getValue()); - xdr.writeInt(msgAccepted.getValue()); - xdr.writeInt(AuthFlavor.AUTH_NONE.getValue()); - xdr.writeVariableOpaque(new byte[0]); + xdr.writeInt(messageType.getValue()); + xdr.writeInt(replyState.getValue()); + Verifier.writeFlavorAndVerifier(verifier, xdr); xdr.writeInt(rejectState.getValue()); return xdr; } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcMessage.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcMessage.java index 47af6f706ed..fff015e9fb1 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcMessage.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcMessage.java @@ -50,6 +50,8 @@ public static Type fromValue(int value) { this.messageType = messageType; } + public abstract XDR write(XDR xdr); + public int getXid() { return xid; } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java index d82e624265a..d457b3aaa91 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcProgram.java @@ -24,6 +24,7 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState; import org.apache.hadoop.oncrpc.RpcCallCache.CacheEntry; +import org.apache.hadoop.oncrpc.security.VerifierNone; import org.apache.hadoop.portmap.PortmapMapping; import org.apache.hadoop.portmap.PortmapRequest; import org.jboss.netty.channel.Channel; @@ -163,13 +164,17 @@ public XDR handle(XDR xdr, InetAddress client, Channel channel) { private XDR programMismatch(XDR out, RpcCall call) { LOG.warn("Invalid RPC call program " + call.getProgram()); - RpcAcceptedReply.voidReply(out, call.getXid(), AcceptState.PROG_UNAVAIL); + RpcAcceptedReply reply = RpcAcceptedReply.getInstance(call.getXid(), + 
AcceptState.PROG_UNAVAIL, new VerifierNone()); + reply.write(out); return out; } private XDR programVersionMismatch(XDR out, RpcCall call) { LOG.warn("Invalid RPC call version " + call.getVersion()); - RpcAcceptedReply.voidReply(out, call.getXid(), AcceptState.PROG_MISMATCH); + RpcAcceptedReply reply = RpcAcceptedReply.getInstance(call.getXid(), + AcceptState.PROG_MISMATCH, new VerifierNone()); + reply.write(out); out.writeInt(lowProgVersion); out.writeInt(highProgVersion); return out; diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcReply.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcReply.java index 5d0bc7e98ff..e866a5c419c 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcReply.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/RpcReply.java @@ -17,6 +17,11 @@ */ package org.apache.hadoop.oncrpc; +import org.apache.hadoop.oncrpc.security.RpcAuthInfo; +import org.apache.hadoop.oncrpc.security.Verifier; + +import com.google.common.base.Preconditions; + /** * Represents an RPC message of type RPC reply as defined in RFC 1831 */ @@ -36,28 +41,35 @@ public static ReplyState fromValue(int value) { } } - private final ReplyState state; + protected final ReplyState replyState; + protected final Verifier verifier; - RpcReply(int xid, RpcMessage.Type messageType, ReplyState state) { - super(xid, messageType); - this.state = state; - validateMessageType(RpcMessage.Type.RPC_REPLY); + RpcReply(int xid, ReplyState state, Verifier verifier) { + super(xid, RpcMessage.Type.RPC_REPLY); + this.replyState = state; + this.verifier = verifier; + } + + public RpcAuthInfo getVerifier() { + return verifier; } public static RpcReply read(XDR xdr) { int xid = xdr.readInt(); final Type messageType = Type.fromValue(xdr.readInt()); + Preconditions.checkState(messageType == RpcMessage.Type.RPC_REPLY); + ReplyState stat = ReplyState.fromValue(xdr.readInt()); switch (stat) { case MSG_ACCEPTED: - return RpcAcceptedReply.read(xid, messageType, stat, xdr); + return RpcAcceptedReply.read(xid, stat, xdr); case MSG_DENIED: - return RpcDeniedReply.read(xid, messageType, stat, xdr); + return RpcDeniedReply.read(xid, stat, xdr); } return null; } public ReplyState getState() { - return state; + return replyState; } } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java index 8f641d885aa..29c706b958f 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Credentials.java @@ -45,6 +45,22 @@ public static Credentials readFlavorAndCredentials(XDR xdr) { return credentials; } + /** + * Write AuthFlavor and the credentials to the XDR + */ + public static void writeFlavorAndCredentials(Credentials cred, XDR xdr) { + if (cred instanceof CredentialsNone) { + xdr.writeInt(AuthFlavor.AUTH_NONE.getValue()); + } else if (cred instanceof CredentialsSys) { + xdr.writeInt(AuthFlavor.AUTH_SYS.getValue()); + } else if (cred instanceof CredentialsGSS) { + xdr.writeInt(AuthFlavor.RPCSEC_GSS.getValue()); + } else { + throw new UnsupportedOperationException("Cannot recognize the verifier"); + } + cred.write(xdr); + } + protected int mCredentialsLength; protected Credentials(AuthFlavor flavor) { diff --git 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java index f8344b2800d..5184e94f29a 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/oncrpc/security/Verifier.java @@ -20,10 +20,11 @@ import org.apache.hadoop.oncrpc.XDR; import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor; -/** - * Base class for verifier. Currently we only support 3 types of auth flavors: - * {@link AuthFlavor#AUTH_NONE}, {@link AuthFlavor#AUTH_SYS}, - * and {@link AuthFlavor#RPCSEC_GSS}. +/** + * Base class for verifier. Currently our authentication only supports 3 types + * of auth flavors: {@link AuthFlavor#AUTH_NONE}, {@link AuthFlavor#AUTH_SYS}, + * and {@link AuthFlavor#RPCSEC_GSS}. Thus for verifier we only need to handle + * AUTH_NONE and RPCSEC_GSS */ public abstract class Verifier extends RpcAuthInfo { @@ -31,6 +32,7 @@ protected Verifier(AuthFlavor flavor) { super(flavor); } + /** Read both AuthFlavor and the verifier from the XDR */ public static Verifier readFlavorAndVerifier(XDR xdr) { AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt()); final Verifier verifer; @@ -46,4 +48,19 @@ public static Verifier readFlavorAndVerifier(XDR xdr) { return verifer; } + /** + * Write AuthFlavor and the verifier to the XDR + */ + public static void writeFlavorAndVerifier(Verifier verifier, XDR xdr) { + if (verifier instanceof VerifierNone) { + xdr.writeInt(AuthFlavor.AUTH_NONE.getValue()); + } else if (verifier instanceof VerifierGSS) { + xdr.writeInt(AuthFlavor.RPCSEC_GSS.getValue()); + } else { + throw new UnsupportedOperationException("Cannot recognize the verifier"); + } + verifier.write(xdr); + } + + } diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java index 98dd1ffa635..943b4abc5c5 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapRequest.java @@ -21,10 +21,7 @@ import org.apache.hadoop.oncrpc.RpcUtil; import org.apache.hadoop.oncrpc.XDR; import org.apache.hadoop.oncrpc.security.CredentialsNone; -import org.apache.hadoop.oncrpc.security.Credentials; -import org.apache.hadoop.oncrpc.security.Verifier; import org.apache.hadoop.oncrpc.security.VerifierNone; -import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor; import org.apache.hadoop.portmap.PortmapInterface.Procedure; /** @@ -37,16 +34,12 @@ public static PortmapMapping mapping(XDR xdr) { public static XDR create(PortmapMapping mapping) { XDR request = new XDR(); - RpcCall.write(request, + RpcCall call = RpcCall.getInstance( RpcUtil.getNewXid(String.valueOf(RpcProgramPortmap.PROGRAM)), RpcProgramPortmap.PROGRAM, RpcProgramPortmap.VERSION, - Procedure.PMAPPROC_SET.getValue()); - request.writeInt(AuthFlavor.AUTH_NONE.getValue()); - Credentials credential = new CredentialsNone(); - credential.write(request); - request.writeInt(AuthFlavor.AUTH_NONE.getValue()); - Verifier verifier = new VerifierNone(); - verifier.write(request); + Procedure.PMAPPROC_SET.getValue(), new CredentialsNone(), + new VerifierNone()); + call.write(request); return mapping.serialize(request); } } diff --git 
a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapResponse.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapResponse.java index f650a74994e..cbbe1358bb7 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapResponse.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/PortmapResponse.java @@ -22,30 +22,31 @@ import org.apache.hadoop.oncrpc.RpcAcceptedReply; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.VerifierNone; /** * Helper utility for sending portmap response. */ public class PortmapResponse { public static XDR voidReply(XDR xdr, int xid) { - RpcAcceptedReply.voidReply(xdr, xid); + RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr); return xdr; } public static XDR intReply(XDR xdr, int xid, int value) { - RpcAcceptedReply.voidReply(xdr, xid); + RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr); xdr.writeInt(value); return xdr; } public static XDR booleanReply(XDR xdr, int xid, boolean value) { - RpcAcceptedReply.voidReply(xdr, xid); + RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr); xdr.writeBoolean(value); return xdr; } public static XDR pmapList(XDR xdr, int xid, Collection list) { - RpcAcceptedReply.voidReply(xdr, xid); + RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr); for (PortmapMapping mapping : list) { System.out.println(mapping); xdr.writeBoolean(true); // Value follows diff --git a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java index c715d33e3cb..46e602c8626 100644 --- a/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java +++ b/hadoop-common-project/hadoop-nfs/src/main/java/org/apache/hadoop/portmap/RpcProgramPortmap.java @@ -28,6 +28,7 @@ import org.apache.hadoop.oncrpc.RpcCall; import org.apache.hadoop.oncrpc.RpcProgram; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.VerifierNone; import org.jboss.netty.channel.Channel; /** @@ -147,8 +148,9 @@ public XDR handleInternal(RpcCall rpcCall, XDR in, XDR out, out = getport(xid, in, out); } else { LOG.info("PortmapHandler unknown rpc procedure=" + portmapProc); - RpcAcceptedReply.voidReply(out, xid, - RpcAcceptedReply.AcceptState.PROC_UNAVAIL); + RpcAcceptedReply.getInstance(xid, + RpcAcceptedReply.AcceptState.PROC_UNAVAIL, new VerifierNone()).write( + out); } return out; } diff --git a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java index fab80ddcb65..189c24d6035 100644 --- a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java +++ b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestFrameDecoder.java @@ -24,6 +24,8 @@ import java.net.InetAddress; import java.nio.ByteBuffer; +import org.apache.hadoop.oncrpc.security.CredentialsNone; +import org.apache.hadoop.oncrpc.security.VerifierNone; import org.jboss.netty.buffer.ByteBufferBackedChannelBuffer; import org.jboss.netty.buffer.ChannelBuffer; import org.jboss.netty.channel.Channel; @@ -55,7 +57,8 @@ public XDR handleInternal(RpcCall rpcCall, XDR in, XDR out, InetAddress client, Channel channel) { // 
Get the final complete request and return a void response. result = in; - return RpcAcceptedReply.voidReply(out, 1234); + RpcAcceptedReply.getAcceptInstance(1234, new VerifierNone()).write(out); + return out; } @Override @@ -161,7 +164,8 @@ public void testFrames() { static void createPortmapXDRheader(XDR xdr_out, int procedure) { // Make this a method - RpcCall.write(xdr_out, 0, 100000, 2, procedure); + RpcCall.getInstance(0, 100000, 2, procedure, new CredentialsNone(), + new VerifierNone()).write(xdr_out); } static XDR createGetportMount() { diff --git a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java index 2daa48cce83..7fd6d5256f9 100644 --- a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java +++ b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcAcceptedReply.java @@ -47,7 +47,7 @@ public void testAcceptStateFromInvalidValue() { @Test public void testConstructor() { Verifier verifier = new VerifierNone(); - RpcAcceptedReply reply = new RpcAcceptedReply(0, RpcMessage.Type.RPC_REPLY, + RpcAcceptedReply reply = new RpcAcceptedReply(0, ReplyState.MSG_ACCEPTED, verifier, AcceptState.SUCCESS); assertEquals(0, reply.getXid()); assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType()); diff --git a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcDeniedReply.java b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcDeniedReply.java index 1976c47dc9c..53ba04e11f5 100644 --- a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcDeniedReply.java +++ b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcDeniedReply.java @@ -19,6 +19,7 @@ import org.apache.hadoop.oncrpc.RpcDeniedReply.RejectState; import org.apache.hadoop.oncrpc.RpcReply.ReplyState; +import org.apache.hadoop.oncrpc.security.VerifierNone; import org.junit.Assert; import org.junit.Test; @@ -39,10 +40,8 @@ public void testRejectStateFromInvalidValue1() { @Test public void testConstructor() { - RpcDeniedReply reply = new RpcDeniedReply(0, RpcMessage.Type.RPC_REPLY, - ReplyState.MSG_ACCEPTED, RejectState.AUTH_ERROR) { - // Anonymous class - }; + RpcDeniedReply reply = new RpcDeniedReply(0, ReplyState.MSG_ACCEPTED, + RejectState.AUTH_ERROR, new VerifierNone()); Assert.assertEquals(0, reply.getXid()); Assert.assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType()); Assert.assertEquals(ReplyState.MSG_ACCEPTED, reply.getState()); diff --git a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcMessage.java b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcMessage.java index ce68f0cbd08..23674e3e230 100644 --- a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcMessage.java +++ b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcMessage.java @@ -26,7 +26,10 @@ public class TestRpcMessage { private RpcMessage getRpcMessage(int xid, RpcMessage.Type msgType) { return new RpcMessage(xid, msgType) { - // Anonymous class + @Override + public XDR write(XDR xdr) { + return null; + } }; } diff --git a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcReply.java b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcReply.java index 
aa4d843a4af..5baa93485a9 100644 --- a/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcReply.java +++ b/hadoop-common-project/hadoop-nfs/src/test/java/org/apache/hadoop/oncrpc/TestRpcReply.java @@ -19,6 +19,7 @@ import org.apache.hadoop.oncrpc.RpcReply.ReplyState; +import org.apache.hadoop.oncrpc.security.VerifierNone; import org.junit.Assert; import org.junit.Test; @@ -39,8 +40,12 @@ public void testReplyStateFromInvalidValue1() { @Test public void testRpcReply() { - RpcReply reply = new RpcReply(0, RpcMessage.Type.RPC_REPLY, ReplyState.MSG_ACCEPTED) { - // Anonymous class + RpcReply reply = new RpcReply(0, ReplyState.MSG_ACCEPTED, + new VerifierNone()) { + @Override + public XDR write(XDR xdr) { + return null; + } }; Assert.assertEquals(0, reply.getXid()); Assert.assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType()); diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/mount/RpcProgramMountd.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/mount/RpcProgramMountd.java index 1ff7f3f45b4..8d679ecc736 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/mount/RpcProgramMountd.java +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/mount/RpcProgramMountd.java @@ -40,6 +40,7 @@ import org.apache.hadoop.oncrpc.RpcCall; import org.apache.hadoop.oncrpc.RpcProgram; import org.apache.hadoop.oncrpc.XDR; +import org.apache.hadoop.oncrpc.security.VerifierNone; import org.jboss.netty.channel.Channel; /** @@ -88,7 +89,8 @@ public XDR nullOp(XDR out, int xid, InetAddress client) { if (LOG.isDebugEnabled()) { LOG.debug("MOUNT NULLOP : " + " client: " + client); } - return RpcAcceptedReply.voidReply(out, xid); + return RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write( + out); } @Override @@ -155,7 +157,7 @@ public XDR umnt(XDR xdr, XDR out, int xid, InetAddress client) { String host = client.getHostName(); mounts.remove(new MountEntry(host, path)); - RpcAcceptedReply.voidReply(out, xid); + RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(out); return out; } @@ -165,7 +167,8 @@ public XDR umntall(XDR out, int xid, InetAddress client) { LOG.debug("MOUNT UMNTALL : " + " client: " + client); } mounts.clear(); - return RpcAcceptedReply.voidReply(out, xid); + return RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write( + out); } @Override @@ -190,8 +193,9 @@ public XDR handleInternal(RpcCall rpcCall, XDR xdr, XDR out, out = MountResponse.writeExportList(out, xid, exports, hostsMatchers); } else { // Invalid procedure - RpcAcceptedReply.voidReply(out, xid, - RpcAcceptedReply.AcceptState.PROC_UNAVAIL); + RpcAcceptedReply.getInstance(xid, + RpcAcceptedReply.AcceptState.PROC_UNAVAIL, new VerifierNone()).write( + out); } return out; } diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java index bc4b0d21ad5..e4e1a87753d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/OpenFileCtx.java @@ -49,6 +49,7 @@ import org.apache.hadoop.nfs.nfs3.response.WccAttr; import org.apache.hadoop.nfs.nfs3.response.WccData; import org.apache.hadoop.oncrpc.XDR; +import 
org.apache.hadoop.oncrpc.security.VerifierNone; import org.jboss.netty.channel.Channel; /** @@ -293,7 +294,8 @@ public void receivedNewWrite(DFSClient dfsClient, WRITE3Request request, WccData fileWcc = new WccData(latestAttr.getWccAttr(), latestAttr); WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO, fileWcc, 0, request.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF); - Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid); + Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse( + new XDR(), xid, new VerifierNone()), xid); } else { // Handle repeated write requests(same xid or not). // If already replied, send reply again. If not replied, drop the @@ -315,7 +317,8 @@ public void receivedNewWrite(DFSClient dfsClient, WRITE3Request request, WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK, fileWcc, request.getCount(), request.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF); - Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid); + Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse( + new XDR(), xid, new VerifierNone()), xid); } updateLastAccessTime(); @@ -369,7 +372,8 @@ private void receivedNewWriteInternal(DFSClient dfsClient, WccData fileWcc = new WccData(preOpAttr, postOpAttr); WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK, fileWcc, count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF); - Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid); + Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse( + new XDR(), xid, new VerifierNone()), xid); writeCtx.setReplied(true); } @@ -394,7 +398,8 @@ private void receivedNewWriteInternal(DFSClient dfsClient, WccData fileWcc = new WccData(preOpAttr, postOpAttr); WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK, fileWcc, count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF); - Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid); + Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse( + new XDR(), xid, new VerifierNone()), xid); writeCtx.setReplied(true); } @@ -420,7 +425,8 @@ private void receivedNewWriteInternal(DFSClient dfsClient, } updateLastAccessTime(); - Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid); + Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse( + new XDR(), xid, new VerifierNone()), xid); } } @@ -715,7 +721,8 @@ private void doSingleWrite(final WriteCtx writeCtx) { WccData fileWcc = new WccData(preOpAttr, latestAttr); WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK, fileWcc, count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF); - Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid); + Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse( + new XDR(), xid, new VerifierNone()), xid); } } catch (IOException e) { @@ -723,7 +730,8 @@ private void doSingleWrite(final WriteCtx writeCtx) { + offset + " and length " + data.length, e); if (!writeCtx.getReplied()) { WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO); - Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid); + Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse( + new XDR(), xid, new VerifierNone()), xid); // Keep stream open. Either client retries or SteamMonitor closes it. 
       }
 
@@ -760,8 +768,9 @@ private void cleanup() {
       WccData fileWcc = new WccData(preOpAttr, latestAttr);
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
           fileWcc, 0, writeCtx.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(writeCtx.getChannel(),
-          response.send(new XDR(), writeCtx.getXid()), writeCtx.getXid());
+      Nfs3Utils.writeChannel(writeCtx.getChannel(), response
+          .writeHeaderAndResponse(new XDR(), writeCtx.getXid(),
+              new VerifierNone()), writeCtx.getXid());
     }
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
index 576bee1dfe8..16153c6faf5 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/RpcProgramNfs3.java
@@ -98,7 +98,6 @@
 import org.apache.hadoop.nfs.nfs3.response.RMDIR3Response;
 import org.apache.hadoop.nfs.nfs3.response.SETATTR3Response;
 import org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response;
-import org.apache.hadoop.nfs.nfs3.response.VoidResponse;
 import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
 import org.apache.hadoop.nfs.nfs3.response.WccAttr;
 import org.apache.hadoop.nfs.nfs3.response.WccData;
@@ -108,12 +107,13 @@
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.RpcReply;
 import org.apache.hadoop.oncrpc.XDR;
-import org.apache.hadoop.oncrpc.security.CredentialsSys;
 import org.apache.hadoop.oncrpc.security.Credentials;
-import org.apache.hadoop.oncrpc.security.Verifier;
+import org.apache.hadoop.oncrpc.security.CredentialsSys;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 import org.apache.hadoop.oncrpc.security.SecurityHandler;
 import org.apache.hadoop.oncrpc.security.SysSecurityHandler;
-import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
+import org.apache.hadoop.oncrpc.security.Verifier;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.apache.hadoop.security.AccessControlException;
 import org.jboss.netty.channel.Channel;
 
@@ -209,7 +209,7 @@ public NFS3Response nullProcedure() {
     if (LOG.isDebugEnabled()) {
       LOG.debug("NFS NULL");
     }
-    return new VoidResponse(Nfs3Status.NFS3_OK);
+    return new NFS3Response(Nfs3Status.NFS3_OK);
   }
 
   @Override
@@ -1790,9 +1790,10 @@ public XDR handleInternal(RpcCall rpcCall, final XDR xdr, XDR out,
           + rpcCall.getCredential().getFlavor() + " is not AUTH_SYS or RPCSEC_GSS.");
       XDR reply = new XDR();
-      reply = RpcDeniedReply.voidReply(reply, xid,
+      RpcDeniedReply rdr = new RpcDeniedReply(xid,
           RpcReply.ReplyState.MSG_ACCEPTED,
-          RpcDeniedReply.RejectState.AUTH_ERROR);
+          RpcDeniedReply.RejectState.AUTH_ERROR, new VerifierNone());
+      rdr.write(reply);
       return reply;
     }
   }
@@ -1857,11 +1858,13 @@ public XDR handleInternal(RpcCall rpcCall, final XDR xdr, XDR out,
       response = commit(xdr, securityHandler, client);
     } else {
       // Invalid procedure
-      RpcAcceptedReply.voidReply(out, xid,
-          RpcAcceptedReply.AcceptState.PROC_UNAVAIL);
+      RpcAcceptedReply.getInstance(xid,
+          RpcAcceptedReply.AcceptState.PROC_UNAVAIL, new VerifierNone()).write(
+          out);
     }
     if (response != null) {
-      out = response.send(out, xid);
+      // TODO: currently we just return VerifierNone
+      out = response.writeHeaderAndResponse(out, xid, new VerifierNone());
     }
 
     return out;
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java
index f8f16a9d495..4cfc3aeb4ce 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/main/java/org/apache/hadoop/hdfs/nfs/nfs3/WriteManager.java
@@ -39,6 +39,7 @@
 import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
 import org.apache.hadoop.nfs.nfs3.response.WccData;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.apache.hadoop.util.Daemon;
 import org.jboss.netty.channel.Channel;
 
@@ -118,7 +119,8 @@ void handleWrite(DFSClient dfsClient, WRITE3Request request, Channel channel,
     byte[] data = request.getData().array();
     if (data.length < count) {
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_INVAL);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
       return;
     }
 
@@ -155,7 +157,8 @@ void handleWrite(DFSClient dfsClient, WRITE3Request request, Channel channel,
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
           fileWcc, count, request.getStableHow(),
           Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
       return;
     }
 
@@ -182,10 +185,12 @@ void handleWrite(DFSClient dfsClient, WRITE3Request request, Channel channel,
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
           fileWcc, count, request.getStableHow(),
           Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
     } else {
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
     }
   }
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestOutOfOrderWrite.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestOutOfOrderWrite.java
index ebce6569b8d..7f3d8c58eea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestOutOfOrderWrite.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestOutOfOrderWrite.java
@@ -38,6 +38,8 @@
 import org.apache.hadoop.oncrpc.SimpleTcpClient;
 import org.apache.hadoop.oncrpc.SimpleTcpClientHandler;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.CredentialsNone;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.ChannelHandlerContext;
@@ -58,15 +60,9 @@ public class TestOutOfOrderWrite {
 
   static XDR create() {
     XDR request = new XDR();
-    RpcCall.write(request, 0x8000004c, Nfs3Constant.PROGRAM,
-        Nfs3Constant.VERSION, Nfs3Constant.NFSPROC3.CREATE.getValue());
-
-    // credentials
-    request.writeInt(0); // auth null
-    request.writeInt(0); // length zero
-    // verifier
-    request.writeInt(0); // auth null
-    request.writeInt(0); // length zero
+    RpcCall.getInstance(0x8000004c, Nfs3Constant.PROGRAM, Nfs3Constant.VERSION,
+        Nfs3Constant.NFSPROC3.CREATE.getValue(), new CredentialsNone(),
+        new VerifierNone()).write(request);
 
     SetAttr3 objAttr = new SetAttr3();
     CREATE3Request createReq = new CREATE3Request(new FileHandle("/"),
@@ -78,15 +74,10 @@ static XDR create() {
   static XDR write(FileHandle handle, int xid, long offset, int count,
       byte[] data) {
     XDR request = new XDR();
-    RpcCall.write(request, xid, Nfs3Constant.PROGRAM, Nfs3Constant.VERSION,
-        Nfs3Constant.NFSPROC3.WRITE.getValue());
+    RpcCall.getInstance(xid, Nfs3Constant.PROGRAM, Nfs3Constant.VERSION,
+        Nfs3Constant.NFSPROC3.CREATE.getValue(), new CredentialsNone(),
+        new VerifierNone()).write(request);
 
-    // credentials
-    request.writeInt(0); // auth null
-    request.writeInt(0); // length zero
-    // verifier
-    request.writeInt(0); // auth null
-    request.writeInt(0); // length zero
     WRITE3Request write1 = new WRITE3Request(handle, offset, count,
         WriteStableHow.UNSTABLE, ByteBuffer.wrap(data));
     write1.serialize(request);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestPortmapRegister.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestPortmapRegister.java
index b013339ed6c..769aa480202 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestPortmapRegister.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestPortmapRegister.java
@@ -26,6 +26,8 @@
 import org.apache.hadoop.oncrpc.RegistrationClient;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.CredentialsNone;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.portmap.PortmapRequest;
 
@@ -78,11 +80,8 @@ public void run() {
 
   static void createPortmapXDRheader(XDR xdr_out, int procedure) {
     // TODO: Move this to RpcRequest
-    RpcCall.write(xdr_out, 0, 100000, 2, procedure);
-    xdr_out.writeInt(0); //no auth
-    xdr_out.writeInt(0);
-    xdr_out.writeInt(0);
-    xdr_out.writeInt(0);
+    RpcCall.getInstance(0, 100000, 2, procedure, new CredentialsNone(),
+        new VerifierNone()).write(xdr_out);
 
     /*
     xdr_out.putInt(1); //unix auth
diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestUdpServer.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestUdpServer.java
index 9c7c5599b56..46c16d3c7fa 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestUdpServer.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/TestUdpServer.java
@@ -27,6 +27,8 @@
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.CredentialsNone;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 
 // TODO: convert this to Junit
 public class TestUdpServer {
@@ -82,7 +84,8 @@ public void run() {
 
   static void createPortmapXDRheader(XDR xdr_out, int procedure) {
     // Make this a method
-    RpcCall.write(xdr_out, 0, 100000, 2, procedure);
+    RpcCall.getInstance(0, 100000, 2, procedure, new CredentialsNone(),
+        new VerifierNone()).write(xdr_out);
   }
 
   static void testGetportMount() {
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index b619b621ba0..154a62aa602 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -158,6 +158,9 @@ Release 2.1.1-beta - 2013-09-23
     HDFS-4680. Audit logging of delegation tokens for MR tracing. (Andrew
     Wang)
 
+    HDFS-5212. Refactor RpcMessage and NFS3Response to support different
+    types of authentication information. (jing9)
+
   OPTIMIZATIONS
 
   BUG FIXES
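
Note for reviewers (editor's sketch, not part of the patch): the calling convention this change standardizes on, using only the constructors and methods shown in the hunks above. The variables xid, fileWcc, count and stableHow are assumed to already be in scope, as they are in RpcProgramNfs3 and WriteManager.

    // Client side: build the RPC call header with explicit credentials and
    // verifier objects instead of hand-writing four zero ints for AUTH_NONE.
    XDR request = new XDR();
    RpcCall.getInstance(xid, Nfs3Constant.PROGRAM, Nfs3Constant.VERSION,
        Nfs3Constant.NFSPROC3.WRITE.getValue(), new CredentialsNone(),
        new VerifierNone()).write(request);

    // Server side: an accepted reply is now an object carrying a Verifier
    // and serializes itself, replacing the old static voidReply() helper.
    XDR out = new XDR();
    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(out);

    // NFS3 responses write the RPC header plus body in one step, replacing
    // the old send(out, xid); the verifier is still AUTH_NONE for now.
    WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK, fileWcc,
        count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF);
    out = response.writeHeaderAndResponse(out, xid, new VerifierNone());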