HDFS-5212. Merge change r1524298 from trunk.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1524302 13f79535-47bb-0310-9956-ffa450edef68
commit 8855a3e0ee
parent 0ce8f0b3c2
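In summary, this merge threads an explicit RPC verifier through the ONC RPC and NFSv3 reply paths: the static helpers RpcAcceptedReply.voidReply(...), RpcCall.write(...) and NFS3Response.send(...) are replaced by getAcceptInstance(...)/getInstance(...) factories, an abstract RpcMessage.write(XDR) method, and NFS3Response.writeHeaderAndResponse(XDR, int, Verifier). The sketch below is illustrative only and is not part of the commit; the class and method names come from the diff, while the wrapper class and its method are hypothetical.

    // Illustrative sketch (not part of the commit): writing an accepted reply
    // with the new verifier-aware API instead of RpcAcceptedReply.voidReply().
    import org.apache.hadoop.oncrpc.RpcAcceptedReply;
    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.VerifierNone;

    public class ReplyWriteExample {         // hypothetical helper class
      static XDR buildAcceptedReply(int xid) {
        XDR xdr = new XDR();
        // getAcceptInstance builds a SUCCESS reply carrying the given verifier;
        // write() serializes the xid, message type, reply state, verifier and
        // accept state into the XDR buffer.
        RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr);
        return xdr;
      }
    }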
MountResponse.java
@@ -22,6 +22,7 @@ import java.util.List;
 import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 
 /**
@@ -37,7 +38,7 @@ public class MountResponse {
   /** Response for RPC call {@link MountInterface.MNTPROC#MNT} */
   public static XDR writeMNTResponse(int status, XDR xdr, int xid,
       byte[] handle) {
-    RpcAcceptedReply.voidReply(xdr, xid);
+    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr);
     xdr.writeInt(status);
     if (status == MNT_OK) {
       xdr.writeVariableOpaque(handle);
@@ -50,7 +51,7 @@ public class MountResponse {
 
   /** Response for RPC call {@link MountInterface.MNTPROC#DUMP} */
   public static XDR writeMountList(XDR xdr, int xid, List<MountEntry> mounts) {
-    RpcAcceptedReply.voidReply(xdr, xid);
+    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr);
     for (MountEntry mountEntry : mounts) {
       xdr.writeBoolean(true); // Value follows yes
       xdr.writeString(mountEntry.host());
@@ -65,7 +66,7 @@ public class MountResponse {
       List<NfsExports> hostMatcher) {
     assert (exports.size() == hostMatcher.size());
 
-    RpcAcceptedReply.voidReply(xdr, xid);
+    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr);
     for (int i = 0; i < exports.size(); i++) {
       xdr.writeBoolean(true); // Value follows - yes
       xdr.writeString(exports.get(i));
ACCESS3Response.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.nfs.nfs3.response;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * ACCESS3 Response
@@ -43,8 +44,8 @@ public class ACCESS3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     out.writeBoolean(true);
     postOpAttr.serialize(out);
     if (this.getStatus() == Nfs3Status.NFS3_OK) {

COMMIT3Response.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.nfs.nfs3.response;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * COMMIT3 Response
@@ -47,8 +48,8 @@ public class COMMIT3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     fileWcc.serialize(out);
     if (getStatus() == Nfs3Status.NFS3_OK) {
       out.writeLongAsHyper(verf);

CREATE3Response.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * CREATE3 Response
@@ -55,8 +56,8 @@ public class CREATE3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     if (getStatus() == Nfs3Status.NFS3_OK) {
       out.writeBoolean(true); // Handle follows
       objHandle.serialize(out);

FSINFO3Response.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.NfsTime;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * FSINFO3 Response
@@ -109,8 +110,8 @@ public class FSINFO3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     out.writeBoolean(true);
     postOpAttr.serialize(out);
 

FSSTAT3Response.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.nfs.nfs3.response;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * FSSTAT3 Response
@@ -90,8 +91,8 @@ public class FSSTAT3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     out.writeBoolean(true);
     if (postOpAttr == null) {
       postOpAttr = new Nfs3FileAttributes();

GETATTR3Response.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.nfs.nfs3.response;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * GETATTR3 Response
@@ -40,8 +41,8 @@ public class GETATTR3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     if (getStatus() == Nfs3Status.NFS3_OK) {
       postOpAttr.serialize(out);
     }

LOOKUP3Response.java
@@ -23,6 +23,7 @@ import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * LOOKUP3 Response
@@ -61,8 +62,8 @@ public class LOOKUP3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     if (this.status == Nfs3Status.NFS3_OK) {
       fileHandle.serialize(out);
       out.writeBoolean(true); // Attribute follows

MKDIR3Response.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * MKDIR3 Response
@@ -55,8 +56,8 @@ public class MKDIR3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     if (getStatus() == Nfs3Status.NFS3_OK) {
       out.writeBoolean(true); // Handle follows
       objFileHandle.serialize(out);
NFS3Response.java
@@ -19,11 +19,13 @@ package org.apache.hadoop.nfs.nfs3.response;
 
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
- * Abstract class for a NFSv3 response
+ * Base class for a NFSv3 response. This class and its subclasses contain
+ * the response from NFSv3 handlers.
  */
-abstract public class NFS3Response {
+public class NFS3Response {
   protected int status;
 
   public NFS3Response(int status) {
@@ -38,8 +40,13 @@ abstract public class NFS3Response {
     this.status = status;
   }
 
-  public XDR send(XDR out, int xid) {
-    RpcAcceptedReply.voidReply(out, xid);
+  /**
+   * Write the response, along with the rpc header (including verifier), to the
+   * XDR.
+   */
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    RpcAcceptedReply reply = RpcAcceptedReply.getAcceptInstance(xid, verifier);
+    reply.write(out);
     out.writeInt(this.getStatus());
     return out;
   }
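The hunks above turn NFS3Response into a concrete base class whose writeHeaderAndResponse(XDR, int, Verifier) emits the accepted-reply header (including the verifier) followed by the NFS3 status word; each response class in this change then overrides it and appends its own body. A minimal sketch of that pattern, using a hypothetical EXAMPLE3Response class that is not part of the commit:

    // Sketch only: EXAMPLE3Response is hypothetical; the override pattern is
    // the one used by ACCESS3Response, COMMIT3Response and the other classes
    // touched in this commit.
    package org.apache.hadoop.nfs.nfs3.response;

    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.Verifier;

    public class EXAMPLE3Response extends NFS3Response {
      public EXAMPLE3Response(int status) {
        super(status);
      }

      @Override
      public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
        // The base class writes the RPC header and the status word first.
        super.writeHeaderAndResponse(out, xid, verifier);
        out.writeBoolean(true); // e.g. "post-op attributes follow"
        return out;
      }
    }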
PATHCONF3Response.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.nfs.nfs3.response;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * PATHCONF3 Response
@@ -77,8 +78,8 @@ public class PATHCONF3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     out.writeBoolean(true);
     postOpAttr.serialize(out);
 

READ3Response.java
@@ -22,6 +22,7 @@ import java.nio.ByteBuffer;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * READ3 Response
@@ -62,8 +63,8 @@ public class READ3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     out.writeBoolean(true); // Attribute follows
     postOpAttr.serialize(out);
 

READDIR3Response.java
@@ -24,6 +24,7 @@ import java.util.List;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * READDIR3 Response
@@ -96,8 +97,8 @@ public class READDIR3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR xdr, int xid) {
-    super.send(xdr, xid);
+  public XDR writeHeaderAndResponse(XDR xdr, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(xdr, xid, verifier);
     xdr.writeBoolean(true); // Attributes follow
     postOpDirAttr.serialize(xdr);
 

READDIRPLUS3Response.java
@@ -25,6 +25,7 @@ import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * READDIRPLUS3 Response
@@ -92,8 +93,8 @@ public class READDIRPLUS3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     out.writeBoolean(true); // attributes follow
     if (postOpDirAttr == null) {
       postOpDirAttr = new Nfs3FileAttributes();

READLINK3Response.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.nfs.nfs3.response;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * READLINK3 Response
@@ -41,8 +42,8 @@ public class READLINK3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     out.writeBoolean(true); // Attribute follows
     postOpSymlinkAttr.serialize(out);
     if (getStatus() == Nfs3Status.NFS3_OK) {

REMOVE3Response.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.nfs.nfs3.response;
 
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * REMOVE3 Response
@@ -35,8 +36,8 @@ public class REMOVE3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     if (dirWcc == null) {
       dirWcc = new WccData(null, null);
     }

RENAME3Response.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.nfs.nfs3.response;
 
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * RENAME3 Response
@@ -45,8 +46,8 @@ public class RENAME3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     fromDirWcc.serialize(out);
     toDirWcc.serialize(out);
     return out;

RMDIR3Response.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.nfs.nfs3.response;
 
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * RMDIR3 Response
@@ -39,8 +40,8 @@ public class RMDIR3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     dirWcc.serialize(out);
     return out;
   }

SETATTR3Response.java
@@ -18,6 +18,7 @@
 package org.apache.hadoop.nfs.nfs3.response;
 
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * SETATTR3 Response
@@ -39,8 +40,8 @@ public class SETATTR3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     wccData.serialize(out);
     return out;
   }

SYMLINK3Response.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * SYMLINK3 Response
@@ -55,8 +56,8 @@ public class SYMLINK3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     if (this.getStatus() == Nfs3Status.NFS3_OK) {
       out.writeBoolean(true);
       objFileHandle.serialize(out);
VoidResponse.java (deleted)
@@ -1,37 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.nfs.nfs3.response;
-
-import org.apache.hadoop.oncrpc.RpcAcceptedReply;
-import org.apache.hadoop.oncrpc.XDR;
-
-/**
- * A void NFSv3 response
- */
-public class VoidResponse extends NFS3Response {
-
-  public VoidResponse(int status) {
-    super(status);
-  }
-
-  @Override
-  public XDR send(XDR out, int xid) {
-    RpcAcceptedReply.voidReply(out, xid);
-    return out;
-  }
-}
WRITE3Response.java
@@ -21,6 +21,7 @@ import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.nfs.nfs3.Nfs3Status;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant.WriteStableHow;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * WRITE3 Response
@@ -58,8 +59,8 @@ public class WRITE3Response extends NFS3Response {
   }
 
   @Override
-  public XDR send(XDR out, int xid) {
-    super.send(out, xid);
+  public XDR writeHeaderAndResponse(XDR out, int xid, Verifier verifier) {
+    super.writeHeaderAndResponse(out, xid, verifier);
     fileWcc.serialize(out);
     if (getStatus() == Nfs3Status.NFS3_OK) {
       out.writeInt(count);
RpcAcceptedReply.java
@@ -18,8 +18,6 @@
 package org.apache.hadoop.oncrpc;
 
 import org.apache.hadoop.oncrpc.security.Verifier;
-import org.apache.hadoop.oncrpc.security.RpcAuthInfo;
-import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 
 /**
  * Represents RPC message MSG_ACCEPTED reply body. See RFC 1831 for details.
@@ -43,43 +41,42 @@ public class RpcAcceptedReply extends RpcReply {
       return ordinal();
     }
   };
 
+  public static RpcAcceptedReply getAcceptInstance(int xid,
+      Verifier verifier) {
+    return getInstance(xid, AcceptState.SUCCESS, verifier);
+  }
+
+  public static RpcAcceptedReply getInstance(int xid, AcceptState state,
+      Verifier verifier) {
+    return new RpcAcceptedReply(xid, ReplyState.MSG_ACCEPTED, verifier,
+        state);
+  }
+
-  private final RpcAuthInfo verifier;
   private final AcceptState acceptState;
 
-  RpcAcceptedReply(int xid, RpcMessage.Type messageType, ReplyState state,
-      RpcAuthInfo verifier, AcceptState acceptState) {
-    super(xid, messageType, state);
-    this.verifier = verifier;
+  RpcAcceptedReply(int xid, ReplyState state, Verifier verifier,
+      AcceptState acceptState) {
+    super(xid, state, verifier);
     this.acceptState = acceptState;
   }
 
-  public static RpcAcceptedReply read(int xid, RpcMessage.Type messageType,
-      ReplyState replyState, XDR xdr) {
+  public static RpcAcceptedReply read(int xid, ReplyState replyState, XDR xdr) {
     Verifier verifier = Verifier.readFlavorAndVerifier(xdr);
     AcceptState acceptState = AcceptState.fromValue(xdr.readInt());
-    return new RpcAcceptedReply(xid, messageType, replyState, verifier,
-        acceptState);
-  }
-
-  public RpcAuthInfo getVerifier() {
-    return verifier;
+    return new RpcAcceptedReply(xid, replyState, verifier, acceptState);
   }
 
   public AcceptState getAcceptState() {
     return acceptState;
   }
 
-  public static XDR voidReply(XDR xdr, int xid) {
-    return voidReply(xdr, xid, AcceptState.SUCCESS);
-  }
-
-  public static XDR voidReply(XDR xdr, int xid, AcceptState acceptState) {
+  @Override
+  public XDR write(XDR xdr) {
     xdr.writeInt(xid);
-    xdr.writeInt(RpcMessage.Type.RPC_REPLY.getValue());
-    xdr.writeInt(ReplyState.MSG_ACCEPTED.getValue());
-    xdr.writeInt(AuthFlavor.AUTH_NONE.getValue());
-    xdr.writeVariableOpaque(new byte[0]);
+    xdr.writeInt(messageType.getValue());
+    xdr.writeInt(replyState.getValue());
+    Verifier.writeFlavorAndVerifier(verifier, xdr);
     xdr.writeInt(acceptState.getValue());
     return xdr;
   }
RpcCall.java
@@ -28,11 +28,25 @@ import org.apache.hadoop.oncrpc.security.Verifier;
 public class RpcCall extends RpcMessage {
   public static final int RPC_VERSION = 2;
   private static final Log LOG = LogFactory.getLog(RpcCall.class);
 
+  public static RpcCall read(XDR xdr) {
+    return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()),
+        xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(),
+        Credentials.readFlavorAndCredentials(xdr),
+        Verifier.readFlavorAndVerifier(xdr));
+  }
+
+  public static RpcCall getInstance(int xid, int program, int version,
+      int procedure, Credentials cred, Verifier verifier) {
+    return new RpcCall(xid, RpcMessage.Type.RPC_CALL, 2, program, version,
+        procedure, cred, verifier);
+  }
+
   private final int rpcVersion;
   private final int program;
   private final int version;
   private final int procedure;
-  private final Credentials credential;
+  private final Credentials credentials;
   private final Verifier verifier;
 
   protected RpcCall(int xid, RpcMessage.Type messageType, int rpcVersion,
@@ -43,7 +57,7 @@ public class RpcCall extends RpcMessage {
     this.program = program;
     this.version = version;
     this.procedure = procedure;
-    this.credential = credential;
+    this.credentials = credential;
     this.verifier = verifier;
     if (LOG.isTraceEnabled()) {
       LOG.trace(this);
@@ -83,28 +97,24 @@ public class RpcCall extends RpcMessage {
   }
 
   public Credentials getCredential() {
-    return credential;
+    return credentials;
   }
 
   public Verifier getVerifier() {
     return verifier;
   }
 
-  public static RpcCall read(XDR xdr) {
-    return new RpcCall(xdr.readInt(), RpcMessage.Type.fromValue(xdr.readInt()),
-        xdr.readInt(), xdr.readInt(), xdr.readInt(), xdr.readInt(),
-        Credentials.readFlavorAndCredentials(xdr),
-        Verifier.readFlavorAndVerifier(xdr));
-  }
-
-  public static void write(XDR out, int xid, int program, int progVersion,
-      int procedure) {
-    out.writeInt(xid);
-    out.writeInt(RpcMessage.Type.RPC_CALL.getValue());
-    out.writeInt(2);
-    out.writeInt(program);
-    out.writeInt(progVersion);
-    out.writeInt(procedure);
+  @Override
+  public XDR write(XDR xdr) {
+    xdr.writeInt(xid);
+    xdr.writeInt(RpcMessage.Type.RPC_CALL.getValue());
+    xdr.writeInt(2);
+    xdr.writeInt(program);
+    xdr.writeInt(version);
+    xdr.writeInt(procedure);
+    Credentials.writeFlavorAndCredentials(credentials, xdr);
+    Verifier.writeFlavorAndVerifier(verifier, xdr);
+    return xdr;
   }
 
   @Override
@@ -112,6 +122,6 @@ public class RpcCall extends RpcMessage {
     return String.format("Xid:%d, messageType:%s, rpcVersion:%d, program:%d,"
         + " version:%d, procedure:%d, credential:%s, verifier:%s", xid,
         messageType, rpcVersion, program, version, procedure,
-        credential.toString(), verifier.toString());
+        credentials.toString(), verifier.toString());
   }
 }
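With RpcCall now carrying its credentials and verifier, callers build a call object and serialize it with write(XDR) instead of invoking the removed static RpcCall.write(out, xid, program, progVersion, procedure) and appending the auth fields by hand, as PortmapRequest previously did. A minimal sketch, with the helper class and its arguments as placeholders:

    // Sketch only: CallWriteExample is hypothetical and the numeric arguments
    // are placeholders, not values taken from the commit.
    import org.apache.hadoop.oncrpc.RpcCall;
    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.CredentialsNone;
    import org.apache.hadoop.oncrpc.security.VerifierNone;

    public class CallWriteExample {          // hypothetical helper class
      static XDR buildCall(int xid, int program, int version, int procedure) {
        XDR xdr = new XDR();
        // Credentials and verifier travel with the call object and are written
        // via Credentials.writeFlavorAndCredentials / Verifier.writeFlavorAndVerifier.
        RpcCall.getInstance(xid, program, version, procedure,
            new CredentialsNone(), new VerifierNone()).write(xdr);
        return xdr;
      }
    }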
RpcDeniedReply.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.oncrpc;
 
-import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
+import org.apache.hadoop.oncrpc.security.Verifier;
 
 /**
  * Represents RPC message MSG_DENIED reply body. See RFC 1831 for details.
@@ -40,16 +40,16 @@ public class RpcDeniedReply extends RpcReply {
 
   private final RejectState rejectState;
 
-  RpcDeniedReply(int xid, RpcMessage.Type messageType, ReplyState replyState,
-      RejectState rejectState) {
-    super(xid, messageType, replyState);
+  public RpcDeniedReply(int xid, ReplyState replyState,
+      RejectState rejectState, Verifier verifier) {
+    super(xid, replyState, verifier);
     this.rejectState = rejectState;
   }
 
-  public static RpcDeniedReply read(int xid, RpcMessage.Type messageType,
-      ReplyState replyState, XDR xdr) {
+  public static RpcDeniedReply read(int xid, ReplyState replyState, XDR xdr) {
+    Verifier verifier = Verifier.readFlavorAndVerifier(xdr);
     RejectState rejectState = RejectState.fromValue(xdr.readInt());
-    return new RpcDeniedReply(xid, messageType, replyState, rejectState);
+    return new RpcDeniedReply(xid, replyState, rejectState, verifier);
   }
 
   public RejectState getRejectState() {
@@ -59,17 +59,17 @@ public class RpcDeniedReply extends RpcReply {
   @Override
   public String toString() {
     return new StringBuffer().append("xid:").append(xid)
-        .append(",messageType:").append(messageType).append("rejectState:")
+        .append(",messageType:").append(messageType).append("verifier_flavor:")
+        .append(verifier.getFlavor()).append("rejectState:")
         .append(rejectState).toString();
   }
 
-  public static XDR voidReply(XDR xdr, int xid, ReplyState msgAccepted,
-      RejectState rejectState) {
+  @Override
+  public XDR write(XDR xdr) {
     xdr.writeInt(xid);
-    xdr.writeInt(RpcMessage.Type.RPC_REPLY.getValue());
-    xdr.writeInt(msgAccepted.getValue());
-    xdr.writeInt(AuthFlavor.AUTH_NONE.getValue());
-    xdr.writeVariableOpaque(new byte[0]);
+    xdr.writeInt(messageType.getValue());
+    xdr.writeInt(replyState.getValue());
+    Verifier.writeFlavorAndVerifier(verifier, xdr);
     xdr.writeInt(rejectState.getValue());
     return xdr;
   }
RpcMessage.java
@@ -50,6 +50,8 @@ public abstract class RpcMessage {
     this.messageType = messageType;
   }
 
+  public abstract XDR write(XDR xdr);
+
   public int getXid() {
     return xid;
   }
RpcProgram.java
@@ -24,6 +24,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply.AcceptState;
 import org.apache.hadoop.oncrpc.RpcCallCache.CacheEntry;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.portmap.PortmapRequest;
 import org.jboss.netty.channel.Channel;
@@ -163,13 +164,17 @@ public abstract class RpcProgram {
 
   private XDR programMismatch(XDR out, RpcCall call) {
     LOG.warn("Invalid RPC call program " + call.getProgram());
-    RpcAcceptedReply.voidReply(out, call.getXid(), AcceptState.PROG_UNAVAIL);
+    RpcAcceptedReply reply = RpcAcceptedReply.getInstance(call.getXid(),
+        AcceptState.PROG_UNAVAIL, new VerifierNone());
+    reply.write(out);
     return out;
   }
 
   private XDR programVersionMismatch(XDR out, RpcCall call) {
     LOG.warn("Invalid RPC call version " + call.getVersion());
-    RpcAcceptedReply.voidReply(out, call.getXid(), AcceptState.PROG_MISMATCH);
+    RpcAcceptedReply reply = RpcAcceptedReply.getInstance(call.getXid(),
+        AcceptState.PROG_MISMATCH, new VerifierNone());
+    reply.write(out);
     out.writeInt(lowProgVersion);
     out.writeInt(highProgVersion);
     return out;
RpcReply.java
@@ -17,6 +17,11 @@
  */
 package org.apache.hadoop.oncrpc;
 
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo;
+import org.apache.hadoop.oncrpc.security.Verifier;
+
+import com.google.common.base.Preconditions;
+
 /**
  * Represents an RPC message of type RPC reply as defined in RFC 1831
  */
@@ -36,28 +41,35 @@ public abstract class RpcReply extends RpcMessage {
     }
   }
 
-  private final ReplyState state;
+  protected final ReplyState replyState;
+  protected final Verifier verifier;
 
-  RpcReply(int xid, RpcMessage.Type messageType, ReplyState state) {
-    super(xid, messageType);
-    this.state = state;
-    validateMessageType(RpcMessage.Type.RPC_REPLY);
+  RpcReply(int xid, ReplyState state, Verifier verifier) {
+    super(xid, RpcMessage.Type.RPC_REPLY);
+    this.replyState = state;
+    this.verifier = verifier;
+  }
+
+  public RpcAuthInfo getVerifier() {
+    return verifier;
   }
 
   public static RpcReply read(XDR xdr) {
     int xid = xdr.readInt();
     final Type messageType = Type.fromValue(xdr.readInt());
+    Preconditions.checkState(messageType == RpcMessage.Type.RPC_REPLY);
+
     ReplyState stat = ReplyState.fromValue(xdr.readInt());
     switch (stat) {
     case MSG_ACCEPTED:
-      return RpcAcceptedReply.read(xid, messageType, stat, xdr);
+      return RpcAcceptedReply.read(xid, stat, xdr);
     case MSG_DENIED:
-      return RpcDeniedReply.read(xid, messageType, stat, xdr);
+      return RpcDeniedReply.read(xid, stat, xdr);
     }
     return null;
   }
 
   public ReplyState getState() {
-    return state;
+    return replyState;
   }
 }
Credentials.java
@@ -45,6 +45,22 @@ public abstract class Credentials extends RpcAuthInfo {
     return credentials;
   }
 
+  /**
+   * Write AuthFlavor and the credentials to the XDR
+   */
+  public static void writeFlavorAndCredentials(Credentials cred, XDR xdr) {
+    if (cred instanceof CredentialsNone) {
+      xdr.writeInt(AuthFlavor.AUTH_NONE.getValue());
+    } else if (cred instanceof CredentialsSys) {
+      xdr.writeInt(AuthFlavor.AUTH_SYS.getValue());
+    } else if (cred instanceof CredentialsGSS) {
+      xdr.writeInt(AuthFlavor.RPCSEC_GSS.getValue());
+    } else {
+      throw new UnsupportedOperationException("Cannot recognize the verifier");
+    }
+    cred.write(xdr);
+  }
+
   protected int mCredentialsLength;
 
   protected Credentials(AuthFlavor flavor) {
Verifier.java
@@ -20,10 +20,11 @@ package org.apache.hadoop.oncrpc.security;
 import org.apache.hadoop.oncrpc.XDR;
 import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 
 /**
- * Base class for verifier. Currently we only support 3 types of auth flavors:
- * {@link AuthFlavor#AUTH_NONE}, {@link AuthFlavor#AUTH_SYS},
- * and {@link AuthFlavor#RPCSEC_GSS}.
+ * Base class for verifier. Currently our authentication only supports 3 types
+ * of auth flavors: {@link AuthFlavor#AUTH_NONE}, {@link AuthFlavor#AUTH_SYS},
+ * and {@link AuthFlavor#RPCSEC_GSS}. Thus for verifier we only need to handle
+ * AUTH_NONE and RPCSEC_GSS
  */
 public abstract class Verifier extends RpcAuthInfo {
 
@@ -31,6 +32,7 @@ public abstract class Verifier extends RpcAuthInfo {
     super(flavor);
   }
 
+  /** Read both AuthFlavor and the verifier from the XDR */
   public static Verifier readFlavorAndVerifier(XDR xdr) {
     AuthFlavor flavor = AuthFlavor.fromValue(xdr.readInt());
     final Verifier verifer;
@@ -46,4 +48,19 @@ public abstract class Verifier extends RpcAuthInfo {
     return verifer;
   }
 
+  /**
+   * Write AuthFlavor and the verifier to the XDR
+   */
+  public static void writeFlavorAndVerifier(Verifier verifier, XDR xdr) {
+    if (verifier instanceof VerifierNone) {
+      xdr.writeInt(AuthFlavor.AUTH_NONE.getValue());
+    } else if (verifier instanceof VerifierGSS) {
+      xdr.writeInt(AuthFlavor.RPCSEC_GSS.getValue());
+    } else {
+      throw new UnsupportedOperationException("Cannot recognize the verifier");
+    }
+    verifier.write(xdr);
+  }
+
 }
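Credentials.writeFlavorAndCredentials and Verifier.writeFlavorAndVerifier are the write-side counterparts of the existing readFlavorAndVerifier: they emit the auth-flavor integer and then delegate to the object's own write method, throwing UnsupportedOperationException for unrecognized subclasses. A small usage sketch (the helper class is hypothetical):

    // Sketch only: emits the AUTH_NONE flavor followed by an empty verifier body.
    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.Verifier;
    import org.apache.hadoop.oncrpc.security.VerifierNone;

    public class AuthWriteExample {          // hypothetical helper class
      static XDR writeNoneVerifier(XDR xdr) {
        Verifier.writeFlavorAndVerifier(new VerifierNone(), xdr);
        return xdr;
      }
    }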
PortmapRequest.java
@@ -21,10 +21,7 @@ import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.RpcUtil;
 import org.apache.hadoop.oncrpc.XDR;
 import org.apache.hadoop.oncrpc.security.CredentialsNone;
-import org.apache.hadoop.oncrpc.security.Credentials;
-import org.apache.hadoop.oncrpc.security.Verifier;
 import org.apache.hadoop.oncrpc.security.VerifierNone;
-import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 import org.apache.hadoop.portmap.PortmapInterface.Procedure;
 
 /**
@@ -37,16 +34,12 @@ public class PortmapRequest {
 
   public static XDR create(PortmapMapping mapping) {
     XDR request = new XDR();
-    RpcCall.write(request,
+    RpcCall call = RpcCall.getInstance(
         RpcUtil.getNewXid(String.valueOf(RpcProgramPortmap.PROGRAM)),
         RpcProgramPortmap.PROGRAM, RpcProgramPortmap.VERSION,
-        Procedure.PMAPPROC_SET.getValue());
-    request.writeInt(AuthFlavor.AUTH_NONE.getValue());
-    Credentials credential = new CredentialsNone();
-    credential.write(request);
-    request.writeInt(AuthFlavor.AUTH_NONE.getValue());
-    Verifier verifier = new VerifierNone();
-    verifier.write(request);
+        Procedure.PMAPPROC_SET.getValue(), new CredentialsNone(),
+        new VerifierNone());
+    call.write(request);
     return mapping.serialize(request);
   }
 }
PortmapResponse.java
@@ -22,30 +22,31 @@ import java.util.Collection;
 
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 
 /**
  * Helper utility for sending portmap response.
  */
 public class PortmapResponse {
   public static XDR voidReply(XDR xdr, int xid) {
-    RpcAcceptedReply.voidReply(xdr, xid);
+    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr);
     return xdr;
   }
 
   public static XDR intReply(XDR xdr, int xid, int value) {
-    RpcAcceptedReply.voidReply(xdr, xid);
+    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr);
     xdr.writeInt(value);
     return xdr;
   }
 
   public static XDR booleanReply(XDR xdr, int xid, boolean value) {
-    RpcAcceptedReply.voidReply(xdr, xid);
+    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr);
     xdr.writeBoolean(value);
     return xdr;
   }
 
   public static XDR pmapList(XDR xdr, int xid, Collection<PortmapMapping> list) {
-    RpcAcceptedReply.voidReply(xdr, xid);
+    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(xdr);
     for (PortmapMapping mapping : list) {
       System.out.println(mapping);
       xdr.writeBoolean(true); // Value follows
RpcProgramPortmap.java
@@ -28,6 +28,7 @@ import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.jboss.netty.channel.Channel;
 
 /**
@@ -147,8 +148,9 @@ public class RpcProgramPortmap extends RpcProgram implements PortmapInterface {
       out = getport(xid, in, out);
     } else {
       LOG.info("PortmapHandler unknown rpc procedure=" + portmapProc);
-      RpcAcceptedReply.voidReply(out, xid,
-          RpcAcceptedReply.AcceptState.PROC_UNAVAIL);
+      RpcAcceptedReply.getInstance(xid,
+          RpcAcceptedReply.AcceptState.PROC_UNAVAIL, new VerifierNone()).write(
+          out);
     }
     return out;
   }
|
||||||
import java.net.InetAddress;
|
import java.net.InetAddress;
|
||||||
import java.nio.ByteBuffer;
|
import java.nio.ByteBuffer;
|
||||||
|
|
||||||
|
import org.apache.hadoop.oncrpc.security.CredentialsNone;
|
||||||
|
import org.apache.hadoop.oncrpc.security.VerifierNone;
|
||||||
import org.jboss.netty.buffer.ByteBufferBackedChannelBuffer;
|
import org.jboss.netty.buffer.ByteBufferBackedChannelBuffer;
|
||||||
import org.jboss.netty.buffer.ChannelBuffer;
|
import org.jboss.netty.buffer.ChannelBuffer;
|
||||||
import org.jboss.netty.channel.Channel;
|
import org.jboss.netty.channel.Channel;
|
||||||
|
@ -55,7 +57,8 @@ public class TestFrameDecoder {
|
||||||
InetAddress client, Channel channel) {
|
InetAddress client, Channel channel) {
|
||||||
// Get the final complete request and return a void response.
|
// Get the final complete request and return a void response.
|
||||||
result = in;
|
result = in;
|
||||||
return RpcAcceptedReply.voidReply(out, 1234);
|
RpcAcceptedReply.getAcceptInstance(1234, new VerifierNone()).write(out);
|
||||||
|
return out;
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@Override
|
||||||
|
@ -161,7 +164,8 @@ public class TestFrameDecoder {
|
||||||
|
|
||||||
static void createPortmapXDRheader(XDR xdr_out, int procedure) {
|
static void createPortmapXDRheader(XDR xdr_out, int procedure) {
|
||||||
// Make this a method
|
// Make this a method
|
||||||
RpcCall.write(xdr_out, 0, 100000, 2, procedure);
|
RpcCall.getInstance(0, 100000, 2, procedure, new CredentialsNone(),
|
||||||
|
new VerifierNone()).write(xdr_out);
|
||||||
}
|
}
|
||||||
|
|
||||||
static XDR createGetportMount() {
|
static XDR createGetportMount() {
|
||||||
|
|
|
TestRpcAcceptedReply.java
@@ -47,7 +47,7 @@ public class TestRpcAcceptedReply {
   @Test
   public void testConstructor() {
     Verifier verifier = new VerifierNone();
-    RpcAcceptedReply reply = new RpcAcceptedReply(0, RpcMessage.Type.RPC_REPLY,
+    RpcAcceptedReply reply = new RpcAcceptedReply(0,
         ReplyState.MSG_ACCEPTED, verifier, AcceptState.SUCCESS);
     assertEquals(0, reply.getXid());
     assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType());
TestRpcDeniedReply.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.oncrpc;
 
 import org.apache.hadoop.oncrpc.RpcDeniedReply.RejectState;
 import org.apache.hadoop.oncrpc.RpcReply.ReplyState;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -39,10 +40,8 @@ public class TestRpcDeniedReply {
 
   @Test
   public void testConstructor() {
-    RpcDeniedReply reply = new RpcDeniedReply(0, RpcMessage.Type.RPC_REPLY,
-        ReplyState.MSG_ACCEPTED, RejectState.AUTH_ERROR) {
-      // Anonymous class
-    };
+    RpcDeniedReply reply = new RpcDeniedReply(0, ReplyState.MSG_ACCEPTED,
+        RejectState.AUTH_ERROR, new VerifierNone());
     Assert.assertEquals(0, reply.getXid());
     Assert.assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType());
    Assert.assertEquals(ReplyState.MSG_ACCEPTED, reply.getState());
TestRpcMessage.java
@@ -26,7 +26,10 @@ import org.junit.Test;
 public class TestRpcMessage {
   private RpcMessage getRpcMessage(int xid, RpcMessage.Type msgType) {
     return new RpcMessage(xid, msgType) {
-      // Anonymous class
+      @Override
+      public XDR write(XDR xdr) {
+        return null;
+      }
     };
   }
 
TestRpcReply.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.oncrpc;
 
 
 import org.apache.hadoop.oncrpc.RpcReply.ReplyState;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -39,8 +40,12 @@ public class TestRpcReply {
 
   @Test
   public void testRpcReply() {
-    RpcReply reply = new RpcReply(0, RpcMessage.Type.RPC_REPLY, ReplyState.MSG_ACCEPTED) {
-      // Anonymous class
+    RpcReply reply = new RpcReply(0, ReplyState.MSG_ACCEPTED,
+        new VerifierNone()) {
+      @Override
+      public XDR write(XDR xdr) {
+        return null;
+      }
     };
     Assert.assertEquals(0, reply.getXid());
     Assert.assertEquals(RpcMessage.Type.RPC_REPLY, reply.getMessageType());
@@ -40,6 +40,7 @@ import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.jboss.netty.channel.Channel;
 
 /**
@@ -88,7 +89,8 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
     if (LOG.isDebugEnabled()) {
       LOG.debug("MOUNT NULLOP : " + " client: " + client);
     }
-    return RpcAcceptedReply.voidReply(out, xid);
+    return RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(
+        out);
   }
 
   @Override
@@ -155,7 +157,7 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
 
     String host = client.getHostName();
     mounts.remove(new MountEntry(host, path));
-    RpcAcceptedReply.voidReply(out, xid);
+    RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(out);
     return out;
   }
 
@@ -165,7 +167,8 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
       LOG.debug("MOUNT UMNTALL : " + " client: " + client);
     }
     mounts.clear();
-    return RpcAcceptedReply.voidReply(out, xid);
+    return RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone()).write(
+        out);
   }
 
   @Override
@@ -190,8 +193,9 @@ public class RpcProgramMountd extends RpcProgram implements MountInterface {
       out = MountResponse.writeExportList(out, xid, exports, hostsMatchers);
     } else {
       // Invalid procedure
-      RpcAcceptedReply.voidReply(out, xid,
-          RpcAcceptedReply.AcceptState.PROC_UNAVAIL);
+      RpcAcceptedReply.getInstance(xid,
+          RpcAcceptedReply.AcceptState.PROC_UNAVAIL, new VerifierNone()).write(
+          out);
     }
     return out;
   }

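All four mountd hunks follow one recipe: build an RpcAcceptedReply with an explicit verifier and serialize it into the output XDR, instead of calling the removed static voidReply helpers. A condensed sketch of the two call shapes used above, assuming a handler that already has the reply buffer and xid (the class and method names are illustrative):

    import org.apache.hadoop.oncrpc.RpcAcceptedReply;
    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.VerifierNone;

    public class AcceptedReplySketch {
      // Normal case: an accepted reply with an AUTH_NONE verifier, written
      // straight into the response buffer (what the NULL and UMNT/UMNTALL
      // procedures above do).
      static XDR accepted(XDR out, int xid) {
        return RpcAcceptedReply.getAcceptInstance(xid, new VerifierNone())
            .write(out);
      }

      // Unknown procedure: same idea, but with an explicit accept state.
      static XDR procUnavailable(XDR out, int xid) {
        return RpcAcceptedReply.getInstance(xid,
            RpcAcceptedReply.AcceptState.PROC_UNAVAIL, new VerifierNone())
            .write(out);
      }
    }
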
@@ -49,6 +49,7 @@ import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
 import org.apache.hadoop.nfs.nfs3.response.WccAttr;
 import org.apache.hadoop.nfs.nfs3.response.WccData;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.jboss.netty.channel.Channel;
 
 /**
@@ -293,7 +294,8 @@ class OpenFileCtx {
       WccData fileWcc = new WccData(latestAttr.getWccAttr(), latestAttr);
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
           fileWcc, 0, request.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
     } else {
       // Handle repeated write requests(same xid or not).
       // If already replied, send reply again. If not replied, drop the
@@ -315,7 +317,8 @@ class OpenFileCtx {
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
           fileWcc, request.getCount(), request.getStableHow(),
           Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
     }
     updateLastAccessTime();
 
@@ -369,7 +372,8 @@ class OpenFileCtx {
       WccData fileWcc = new WccData(preOpAttr, postOpAttr);
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
           fileWcc, count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
       writeCtx.setReplied(true);
     }
 
@@ -394,7 +398,8 @@ class OpenFileCtx {
       WccData fileWcc = new WccData(preOpAttr, postOpAttr);
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
           fileWcc, count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
       writeCtx.setReplied(true);
     }
 
@@ -420,7 +425,8 @@ class OpenFileCtx {
       }
 
       updateLastAccessTime();
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
     }
   }
 
@@ -715,7 +721,8 @@ class OpenFileCtx {
       WccData fileWcc = new WccData(preOpAttr, latestAttr);
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
           fileWcc, count, stableHow, Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
     }
 
     } catch (IOException e) {
@@ -723,7 +730,8 @@ class OpenFileCtx {
          + offset + " and length " + data.length, e);
       if (!writeCtx.getReplied()) {
         WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO);
-        Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+        Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+            new XDR(), xid, new VerifierNone()), xid);
         // Keep stream open. Either client retries or SteamMonitor closes it.
       }
 
@@ -760,8 +768,9 @@ class OpenFileCtx {
         WccData fileWcc = new WccData(preOpAttr, latestAttr);
         WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
             fileWcc, 0, writeCtx.getStableHow(), Nfs3Constant.WRITE_COMMIT_VERF);
-        Nfs3Utils.writeChannel(writeCtx.getChannel(),
-            response.send(new XDR(), writeCtx.getXid()), writeCtx.getXid());
+        Nfs3Utils.writeChannel(writeCtx.getChannel(), response
+            .writeHeaderAndResponse(new XDR(), writeCtx.getXid(),
+                new VerifierNone()), writeCtx.getXid());
       }
     }
 
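Every OpenFileCtx hunk replaces response.send(new XDR(), xid) with writeHeaderAndResponse, which additionally takes the verifier that ends up in the RPC reply header. A minimal sketch of the replacement call, reusing only the constructors and signatures visible in this diff (the wrapper class and method names are hypothetical):

    import org.apache.hadoop.nfs.nfs3.Nfs3Status;
    import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.VerifierNone;

    public class WriteReplySketch {
      // Serialize the RPC reply header (including the verifier) followed by
      // the WRITE3 response body; the returned XDR is what gets handed to
      // Nfs3Utils.writeChannel in the code above.
      static XDR ioErrorReply(int xid) {
        WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO);
        return response.writeHeaderAndResponse(new XDR(), xid,
            new VerifierNone());
      }
    }
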
@@ -98,7 +98,6 @@ import org.apache.hadoop.nfs.nfs3.response.RENAME3Response;
 import org.apache.hadoop.nfs.nfs3.response.RMDIR3Response;
 import org.apache.hadoop.nfs.nfs3.response.SETATTR3Response;
 import org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response;
-import org.apache.hadoop.nfs.nfs3.response.VoidResponse;
 import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
 import org.apache.hadoop.nfs.nfs3.response.WccAttr;
 import org.apache.hadoop.nfs.nfs3.response.WccData;
@@ -108,12 +107,13 @@ import org.apache.hadoop.oncrpc.RpcDeniedReply;
 import org.apache.hadoop.oncrpc.RpcProgram;
 import org.apache.hadoop.oncrpc.RpcReply;
 import org.apache.hadoop.oncrpc.XDR;
-import org.apache.hadoop.oncrpc.security.CredentialsSys;
 import org.apache.hadoop.oncrpc.security.Credentials;
-import org.apache.hadoop.oncrpc.security.Verifier;
+import org.apache.hadoop.oncrpc.security.CredentialsSys;
+import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
 import org.apache.hadoop.oncrpc.security.SecurityHandler;
 import org.apache.hadoop.oncrpc.security.SysSecurityHandler;
-import org.apache.hadoop.oncrpc.security.RpcAuthInfo.AuthFlavor;
+import org.apache.hadoop.oncrpc.security.Verifier;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.apache.hadoop.security.AccessControlException;
 import org.jboss.netty.channel.Channel;
 
@@ -209,7 +209,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
     if (LOG.isDebugEnabled()) {
       LOG.debug("NFS NULL");
     }
-    return new VoidResponse(Nfs3Status.NFS3_OK);
+    return new NFS3Response(Nfs3Status.NFS3_OK);
   }
 
   @Override
@@ -1790,9 +1790,10 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
           + rpcCall.getCredential().getFlavor()
           + " is not AUTH_SYS or RPCSEC_GSS.");
       XDR reply = new XDR();
-      reply = RpcDeniedReply.voidReply(reply, xid,
+      RpcDeniedReply rdr = new RpcDeniedReply(xid,
           RpcReply.ReplyState.MSG_ACCEPTED,
-          RpcDeniedReply.RejectState.AUTH_ERROR);
+          RpcDeniedReply.RejectState.AUTH_ERROR, new VerifierNone());
+      rdr.write(reply);
       return reply;
     }
   }
@@ -1857,11 +1858,13 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
       response = commit(xdr, securityHandler, client);
     } else {
       // Invalid procedure
-      RpcAcceptedReply.voidReply(out, xid,
-          RpcAcceptedReply.AcceptState.PROC_UNAVAIL);
+      RpcAcceptedReply.getInstance(xid,
+          RpcAcceptedReply.AcceptState.PROC_UNAVAIL, new VerifierNone()).write(
+          out);
     }
     if (response != null) {
-      out = response.send(out, xid);
+      // TODO: currently we just return VerifierNone
+      out = response.writeHeaderAndResponse(out, xid, new VerifierNone());
     }
 
     return out;

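For rejected calls the same idea applies on the denial path: the reply is now an RpcDeniedReply object that carries its reject state and verifier, and is written into the XDR explicitly. A sketch of that path under the signatures shown in the hunk above (the class and method names are illustrative):

    import org.apache.hadoop.oncrpc.RpcDeniedReply;
    import org.apache.hadoop.oncrpc.RpcReply;
    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.VerifierNone;

    public class DeniedReplySketch {
      // Reject a call whose credential flavor is not AUTH_SYS or RPCSEC_GSS.
      static XDR authError(int xid) {
        XDR reply = new XDR();
        RpcDeniedReply rdr = new RpcDeniedReply(xid,
            RpcReply.ReplyState.MSG_ACCEPTED,
            RpcDeniedReply.RejectState.AUTH_ERROR, new VerifierNone());
        rdr.write(reply);
        return reply;
      }
    }
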
@@ -39,6 +39,7 @@ import org.apache.hadoop.nfs.nfs3.request.WRITE3Request;
 import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
 import org.apache.hadoop.nfs.nfs3.response.WccData;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.apache.hadoop.util.Daemon;
 import org.jboss.netty.channel.Channel;
 
@@ -118,7 +119,8 @@ public class WriteManager {
     byte[] data = request.getData().array();
     if (data.length < count) {
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_INVAL);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
       return;
     }
 
@@ -155,7 +157,8 @@ public class WriteManager {
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO,
           fileWcc, count, request.getStableHow(),
           Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
       return;
     }
 
@@ -182,10 +185,12 @@ public class WriteManager {
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3_OK,
           fileWcc, count, request.getStableHow(),
           Nfs3Constant.WRITE_COMMIT_VERF);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
     } else {
       WRITE3Response response = new WRITE3Response(Nfs3Status.NFS3ERR_IO);
-      Nfs3Utils.writeChannel(channel, response.send(new XDR(), xid), xid);
+      Nfs3Utils.writeChannel(channel, response.writeHeaderAndResponse(
+          new XDR(), xid, new VerifierNone()), xid);
     }
   }
 
@@ -38,6 +38,8 @@ import org.apache.hadoop.oncrpc.RpcReply;
 import org.apache.hadoop.oncrpc.SimpleTcpClient;
 import org.apache.hadoop.oncrpc.SimpleTcpClientHandler;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.CredentialsNone;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.channel.Channel;
 import org.jboss.netty.channel.ChannelHandlerContext;
@@ -58,15 +60,9 @@ public class TestOutOfOrderWrite {
 
   static XDR create() {
     XDR request = new XDR();
-    RpcCall.write(request, 0x8000004c, Nfs3Constant.PROGRAM,
-        Nfs3Constant.VERSION, Nfs3Constant.NFSPROC3.CREATE.getValue());
-    // credentials
-    request.writeInt(0); // auth null
-    request.writeInt(0); // length zero
-    // verifier
-    request.writeInt(0); // auth null
-    request.writeInt(0); // length zero
+    RpcCall.getInstance(0x8000004c, Nfs3Constant.PROGRAM, Nfs3Constant.VERSION,
+        Nfs3Constant.NFSPROC3.CREATE.getValue(), new CredentialsNone(),
+        new VerifierNone()).write(request);
 
     SetAttr3 objAttr = new SetAttr3();
     CREATE3Request createReq = new CREATE3Request(new FileHandle("/"),
@@ -78,15 +74,10 @@ public class TestOutOfOrderWrite {
   static XDR write(FileHandle handle, int xid, long offset, int count,
       byte[] data) {
     XDR request = new XDR();
-    RpcCall.write(request, xid, Nfs3Constant.PROGRAM, Nfs3Constant.VERSION,
-        Nfs3Constant.NFSPROC3.WRITE.getValue());
-    // credentials
-    request.writeInt(0); // auth null
-    request.writeInt(0); // length zero
-    // verifier
-    request.writeInt(0); // auth null
-    request.writeInt(0); // length zero
+    RpcCall.getInstance(xid, Nfs3Constant.PROGRAM, Nfs3Constant.VERSION,
+        Nfs3Constant.NFSPROC3.CREATE.getValue(), new CredentialsNone(),
+        new VerifierNone()).write(request);
 
     WRITE3Request write1 = new WRITE3Request(handle, offset, count,
         WriteStableHow.UNSTABLE, ByteBuffer.wrap(data));
     write1.serialize(request);

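On the request side, the tests no longer hand-write the credential and verifier fields (the four writeInt(0) calls): RpcCall.getInstance takes CredentialsNone/VerifierNone objects and serializes the complete call header. A sketch of the new header construction, using the same program/version/procedure constants as the test above (the wrapper class is hypothetical):

    import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
    import org.apache.hadoop.oncrpc.RpcCall;
    import org.apache.hadoop.oncrpc.XDR;
    import org.apache.hadoop.oncrpc.security.CredentialsNone;
    import org.apache.hadoop.oncrpc.security.VerifierNone;

    public class RpcCallHeaderSketch {
      // Emit an ONC RPC call header with AUTH_NONE credentials and verifier;
      // the request body (e.g. a serialized CREATE3Request) follows in the
      // same XDR buffer.
      static XDR createCallHeader(int xid) {
        XDR request = new XDR();
        RpcCall.getInstance(xid, Nfs3Constant.PROGRAM, Nfs3Constant.VERSION,
            Nfs3Constant.NFSPROC3.CREATE.getValue(), new CredentialsNone(),
            new VerifierNone()).write(request);
        return request;
      }
    }
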
@@ -26,6 +26,8 @@ import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.oncrpc.RegistrationClient;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.CredentialsNone;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 import org.apache.hadoop.portmap.PortmapMapping;
 import org.apache.hadoop.portmap.PortmapRequest;
 
@@ -78,11 +80,8 @@ public class TestPortmapRegister {
 
   static void createPortmapXDRheader(XDR xdr_out, int procedure) {
     // TODO: Move this to RpcRequest
-    RpcCall.write(xdr_out, 0, 100000, 2, procedure);
-    xdr_out.writeInt(0); //no auth
-    xdr_out.writeInt(0);
-    xdr_out.writeInt(0);
-    xdr_out.writeInt(0);
+    RpcCall.getInstance(0, 100000, 2, procedure, new CredentialsNone(),
+        new VerifierNone()).write(xdr_out);
 
     /*
     xdr_out.putInt(1); //unix auth

@@ -27,6 +27,8 @@ import java.net.UnknownHostException;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.oncrpc.RpcCall;
 import org.apache.hadoop.oncrpc.XDR;
+import org.apache.hadoop.oncrpc.security.CredentialsNone;
+import org.apache.hadoop.oncrpc.security.VerifierNone;
 
 // TODO: convert this to Junit
 public class TestUdpServer {
@@ -82,7 +84,8 @@ public class TestUdpServer {
 
   static void createPortmapXDRheader(XDR xdr_out, int procedure) {
     // Make this a method
-    RpcCall.write(xdr_out, 0, 100000, 2, procedure);
+    RpcCall.getInstance(0, 100000, 2, procedure, new CredentialsNone(),
+        new VerifierNone()).write(xdr_out);
   }
 
   static void testGetportMount() {

@@ -158,6 +158,9 @@ Release 2.1.1-beta - 2013-09-23
 
     HDFS-4680. Audit logging of delegation tokens for MR tracing. (Andrew Wang)
 
+    HDFS-5212. Refactor RpcMessage and NFS3Response to support different
+    types of authentication information. (jing9)
+
   OPTIMIZATIONS
 
   BUG FIXES