HDFS-6892. Add XDR packaging method for each NFS request. Contributed by Brandon Li
(cherry picked from commit cd9182d8b5)
parent c8b254d70e, commit 99d8434511
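Summary of the change: each NFS3 request class drops its XDR-reading constructor in favor of a static deserialize(XDR) factory and gains a serialize(XDR) method, both declared on a new abstract NFS3Request base class. RpcProgramNfs3 switches its handlers to the factories, and the tests can now build request objects and package them into XDR instead of hand-writing the wire fields. The round-trip sketch below illustrates the resulting API; it is not part of the patch, and the class name, file id, and printed field are illustrative only (getFileId() is assumed to be available on FileHandle).

import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.nfs.nfs3.request.ACCESS3Request;
import org.apache.hadoop.oncrpc.XDR;

public class Access3RoundTripSketch {
  public static void main(String[] args) throws Exception {
    // Build the request from a file handle (4079L is an arbitrary example id).
    FileHandle handle = new FileHandle(4079L);
    ACCESS3Request outgoing = new ACCESS3Request(handle);

    // New XDR packaging method: write the request into an XDR buffer.
    XDR xdr = new XDR();
    outgoing.serialize(xdr);

    // New static factory: read the same request back out of the buffer.
    ACCESS3Request incoming = ACCESS3Request.deserialize(xdr.asReadOnlyWrap());
    System.out.println("round-tripped file id: "
        + incoming.getHandle().getFileId());   // getFileId() assumed on FileHandle
  }
}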

ACCESS3Request.java
@@ -19,13 +19,24 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
  * ACCESS3 Request
  */
 public class ACCESS3Request extends RequestWithHandle {
-  public ACCESS3Request(XDR xdr) throws IOException {
-    super(xdr);
+  public static ACCESS3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    return new ACCESS3Request(handle);
+  }
+
+  public ACCESS3Request(FileHandle handle) {
+    super(handle);
+  }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
   }
 }

COMMIT3Request.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -28,10 +29,17 @@ public class COMMIT3Request extends RequestWithHandle {
   private final long offset;
   private final int count;
 
-  public COMMIT3Request(XDR xdr) throws IOException {
-    super(xdr);
-    offset = xdr.readHyper();
-    count = xdr.readInt();
+  public static COMMIT3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    long offset = xdr.readHyper();
+    int count = xdr.readInt();
+    return new COMMIT3Request(handle, offset, count);
+  }
+
+  public COMMIT3Request(FileHandle handle, long offset, int count) {
+    super(handle);
+    this.offset = offset;
+    this.count = count;
   }
 
   public long getOffset() {
@@ -41,4 +49,11 @@ public class COMMIT3Request extends RequestWithHandle {
   public int getCount() {
     return this.count;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    xdr.writeLongAsHyper(offset);
+    xdr.writeInt(count);
+  }
 }

CREATE3Request.java
@@ -29,8 +29,8 @@ import org.apache.hadoop.oncrpc.XDR;
 public class CREATE3Request extends RequestWithHandle {
   private final String name;
   private final int mode;
-  private SetAttr3 objAttr = null;
-  private long verf;
+  private final SetAttr3 objAttr;
+  private long verf = 0;
 
   public CREATE3Request(FileHandle handle, String name, int mode,
       SetAttr3 objAttr, long verf) {
@@ -41,12 +41,12 @@ public class CREATE3Request extends RequestWithHandle {
     this.verf = verf;
   }
 
-  public CREATE3Request(XDR xdr) throws IOException {
-    super(xdr);
-    name = xdr.readString();
-    mode = xdr.readInt();
-
-    objAttr = new SetAttr3();
+  public static CREATE3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    String name = xdr.readString();
+    int mode = xdr.readInt();
+    SetAttr3 objAttr = new SetAttr3();
+    long verf = 0;
     if ((mode == Nfs3Constant.CREATE_UNCHECKED)
         || (mode == Nfs3Constant.CREATE_GUARDED)) {
       objAttr.deserialize(xdr);
@@ -55,6 +55,7 @@ public class CREATE3Request extends RequestWithHandle {
     } else {
       throw new IOException("Wrong create mode:" + mode);
     }
+    return new CREATE3Request(handle, name, mode, objAttr, verf);
   }
 
   public String getName() {
@@ -81,4 +82,5 @@ public class CREATE3Request extends RequestWithHandle {
     xdr.writeInt(mode);
     objAttr.serialize(xdr);
   }
+
 }

FSINFO3Request.java
@@ -19,13 +19,24 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
  * FSINFO3 Request
  */
 public class FSINFO3Request extends RequestWithHandle {
-  public FSINFO3Request(XDR xdr) throws IOException {
-    super(xdr);
+  public static FSINFO3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    return new FSINFO3Request(handle);
+  }
+
+  public FSINFO3Request(FileHandle handle) {
+    super(handle);
+  }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
   }
 }

FSSTAT3Request.java
@@ -19,13 +19,24 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
  * FSSTAT3 Request
  */
 public class FSSTAT3Request extends RequestWithHandle {
-  public FSSTAT3Request(XDR xdr) throws IOException {
-    super(xdr);
+  public static FSSTAT3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    return new FSSTAT3Request(handle);
+  }
+
+  public FSSTAT3Request(FileHandle handle) {
+    super(handle);
+  }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
   }
 }

GETATTR3Request.java
@@ -19,13 +19,24 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
  * GETATTR3 Request
  */
 public class GETATTR3Request extends RequestWithHandle {
-  public GETATTR3Request(XDR xdr) throws IOException {
-    super(xdr);
+  public static GETATTR3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    return new GETATTR3Request(handle);
+  }
+
+  public GETATTR3Request(FileHandle handle) {
+    super(handle);
+  }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
   }
 }

LOOKUP3Request.java
@@ -35,9 +35,10 @@ public class LOOKUP3Request extends RequestWithHandle {
     this.name = name;
   }
 
-  public LOOKUP3Request(XDR xdr) throws IOException {
-    super(xdr);
-    name = xdr.readString();
+  public static LOOKUP3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    String name = xdr.readString();
+    return new LOOKUP3Request(handle, name);
   }
 
   public String getName() {
@@ -51,7 +52,7 @@ public class LOOKUP3Request extends RequestWithHandle {
   @Override
   @VisibleForTesting
   public void serialize(XDR xdr) {
-    super.serialize(xdr);
+    handle.serialize(xdr);
     xdr.writeInt(name.getBytes().length);
     xdr.writeFixedOpaque(name.getBytes());
   }

MKDIR3Request.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -28,11 +29,18 @@ public class MKDIR3Request extends RequestWithHandle {
   private final String name;
   private final SetAttr3 objAttr;
 
-  public MKDIR3Request(XDR xdr) throws IOException {
-    super(xdr);
-    name = xdr.readString();
-    objAttr = new SetAttr3();
+  public static MKDIR3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    String name = xdr.readString();
+    SetAttr3 objAttr = new SetAttr3();
     objAttr.deserialize(xdr);
+    return new MKDIR3Request(handle, name, objAttr);
+  }
+
+  public MKDIR3Request(FileHandle handle, String name, SetAttr3 objAttr) {
+    super(handle);
+    this.name = name;
+    this.objAttr = objAttr;
   }
 
   public String getName() {
@@ -42,4 +50,12 @@ public class MKDIR3Request extends RequestWithHandle {
   public SetAttr3 getObjAttr() {
     return objAttr;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    xdr.writeInt(name.getBytes().length);
+    xdr.writeFixedOpaque(name.getBytes());
+    objAttr.serialize(xdr);
+  }
 }

NFS3Request.java (new file)
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.nfs.nfs3.request;
+
+import java.io.IOException;
+
+import org.apache.hadoop.nfs.nfs3.FileHandle;
+import org.apache.hadoop.oncrpc.XDR;
+
+/**
+ * An NFS request that uses {@link FileHandle} to identify a file.
+ */
+public abstract class NFS3Request {
+
+  /**
+   * Deserialize a handle from an XDR object
+   */
+  static FileHandle readHandle(XDR xdr) throws IOException {
+    FileHandle handle = new FileHandle();
+    if (!handle.deserialize(xdr)) {
+      throw new IOException("can't deserialize file handle");
+    }
+    return handle;
+  }
+
+  /**
+   * Subclass should implement. Usually handle is the first to be serialized
+   */
+  public abstract void serialize(XDR xdr);
+}
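
The abstract base class above fixes the contract that every concrete request follows in the per-class hunks before and after it: a static deserialize factory that starts with readHandle(xdr), and a serialize override that writes the handle first. A hedged sketch of what a hypothetical new request type would look like under that contract (EXAMPLE3Request and its count field are invented for illustration and are not part of this patch):

package org.apache.hadoop.nfs.nfs3.request;

import java.io.IOException;

import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.oncrpc.XDR;

/**
 * EXAMPLE3Request is a hypothetical request type, shown only to illustrate
 * the deserialize/serialize contract introduced by NFS3Request.
 */
public class EXAMPLE3Request extends RequestWithHandle {
  private final int count;

  public static EXAMPLE3Request deserialize(XDR xdr) throws IOException {
    FileHandle handle = readHandle(xdr);   // handle always comes first
    int count = xdr.readInt();
    return new EXAMPLE3Request(handle, count);
  }

  public EXAMPLE3Request(FileHandle handle, int count) {
    super(handle);
    this.count = count;
  }

  @Override
  public void serialize(XDR xdr) {
    handle.serialize(xdr);                 // mirror the read order
    xdr.writeInt(count);
  }
}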

PATHCONF3Request.java
@@ -19,13 +19,24 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
  * PATHCONF3 Request
  */
 public class PATHCONF3Request extends RequestWithHandle {
-  public PATHCONF3Request(XDR xdr) throws IOException {
-    super(xdr);
+  public static PATHCONF3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    return new PATHCONF3Request(handle);
+  }
+
+  public PATHCONF3Request(FileHandle handle) {
+    super(handle);
+  }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
   }
 }

READ3Request.java
@@ -31,10 +31,11 @@ public class READ3Request extends RequestWithHandle {
   private final long offset;
   private final int count;
 
-  public READ3Request(XDR xdr) throws IOException {
-    super(xdr);
-    offset = xdr.readHyper();
-    count = xdr.readInt();
+  public static READ3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    long offset = xdr.readHyper();
+    int count = xdr.readInt();
+    return new READ3Request(handle, offset, count);
   }
 
   @VisibleForTesting

READDIR3Request.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -29,11 +30,20 @@ public class READDIR3Request extends RequestWithHandle {
   private final long cookieVerf;
   private final int count;
 
-  public READDIR3Request(XDR xdr) throws IOException {
-    super(xdr);
-    cookie = xdr.readHyper();
-    cookieVerf = xdr.readHyper();
-    count = xdr.readInt();
+  public static READDIR3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    long cookie = xdr.readHyper();
+    long cookieVerf = xdr.readHyper();
+    int count = xdr.readInt();
+    return new READDIR3Request(handle, cookie, cookieVerf, count);
+  }
+
+  public READDIR3Request(FileHandle handle, long cookie, long cookieVerf,
+      int count) {
+    super(handle);
+    this.cookie = cookie;
+    this.cookieVerf = cookieVerf;
+    this.count = count;
   }
 
   public long getCookie() {
@@ -47,4 +57,12 @@ public class READDIR3Request extends RequestWithHandle {
   public long getCount() {
     return this.count;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    xdr.writeLongAsHyper(cookie);
+    xdr.writeLongAsHyper(cookieVerf);
+    xdr.writeInt(count);
+  }
 }

READDIRPLUS3Request.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -30,12 +31,23 @@ public class READDIRPLUS3Request extends RequestWithHandle {
   private final int dirCount;
   private final int maxCount;
 
-  public READDIRPLUS3Request(XDR xdr) throws IOException {
-    super(xdr);
-    cookie = xdr.readHyper();
-    cookieVerf = xdr.readHyper();
-    dirCount = xdr.readInt();
-    maxCount = xdr.readInt();
+  public static READDIRPLUS3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    long cookie = xdr.readHyper();
+    long cookieVerf = xdr.readHyper();
+    int dirCount = xdr.readInt();
+    int maxCount = xdr.readInt();
+    return new READDIRPLUS3Request(handle, cookie, cookieVerf, dirCount,
+        maxCount);
+  }
+
+  public READDIRPLUS3Request(FileHandle handle, long cookie, long cookieVerf,
+      int dirCount, int maxCount) {
+    super(handle);
+    this.cookie = cookie;
+    this.cookieVerf = cookieVerf;
+    this.dirCount = dirCount;
+    this.maxCount = maxCount;
   }
 
   public long getCookie() {
@@ -53,4 +65,13 @@ public class READDIRPLUS3Request extends RequestWithHandle {
   public int getMaxCount() {
     return maxCount;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    xdr.writeLongAsHyper(cookie);
+    xdr.writeLongAsHyper(cookieVerf);
+    xdr.writeInt(dirCount);
+    xdr.writeInt(maxCount);
+  }
 }

READLINK3Request.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -26,7 +27,17 @@ import org.apache.hadoop.oncrpc.XDR;
  */
 public class READLINK3Request extends RequestWithHandle {
 
-  public READLINK3Request(XDR xdr) throws IOException {
-    super(xdr);
+  public static READLINK3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    return new READLINK3Request(handle);
+  }
+
+  public READLINK3Request(FileHandle handle) {
+    super(handle);
+  }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
   }
 }

REMOVE3Request.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -27,12 +28,25 @@ import org.apache.hadoop.oncrpc.XDR;
 public class REMOVE3Request extends RequestWithHandle {
   private final String name;
 
-  public REMOVE3Request(XDR xdr) throws IOException {
-    super(xdr);
-    name = xdr.readString();
+  public static REMOVE3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    String name = xdr.readString();
+    return new REMOVE3Request(handle, name);
+  }
+
+  public REMOVE3Request(FileHandle handle, String name) {
+    super(handle);
+    this.name = name;
   }
 
   public String getName() {
     return this.name;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    xdr.writeInt(name.getBytes().length);
+    xdr.writeFixedOpaque(name.getBytes());
+  }
 }

RENAME3Request.java
@@ -25,23 +25,26 @@ import org.apache.hadoop.oncrpc.XDR;
 /**
  * RENAME3 Request
  */
-public class RENAME3Request {
+public class RENAME3Request extends NFS3Request {
   private final FileHandle fromDirHandle;
   private final String fromName;
   private final FileHandle toDirHandle;
   private final String toName;
 
-  public RENAME3Request(XDR xdr) throws IOException {
-    fromDirHandle = new FileHandle();
-    if (!fromDirHandle.deserialize(xdr)) {
-      throw new IOException("can't deserialize file handle");
-    }
-    fromName = xdr.readString();
-    toDirHandle = new FileHandle();
-    if (!toDirHandle.deserialize(xdr)) {
-      throw new IOException("can't deserialize file handle");
-    }
-    toName = xdr.readString();
+  public static RENAME3Request deserialize(XDR xdr) throws IOException {
+    FileHandle fromDirHandle = readHandle(xdr);
+    String fromName = xdr.readString();
+    FileHandle toDirHandle = readHandle(xdr);
+    String toName = xdr.readString();
+    return new RENAME3Request(fromDirHandle, fromName, toDirHandle, toName);
+  }
+
+  public RENAME3Request(FileHandle fromDirHandle, String fromName,
+      FileHandle toDirHandle, String toName) {
+    this.fromDirHandle = fromDirHandle;
+    this.fromName = fromName;
+    this.toDirHandle = toDirHandle;
+    this.toName = toName;
   }
 
   public FileHandle getFromDirHandle() {
@@ -59,4 +62,14 @@ public class RENAME3Request {
   public String getToName() {
     return toName;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    fromDirHandle.serialize(xdr);
+    xdr.writeInt(fromName.getBytes().length);
+    xdr.writeFixedOpaque(fromName.getBytes());
+    toDirHandle.serialize(xdr);
+    xdr.writeInt(toName.getBytes().length);
+    xdr.writeFixedOpaque(toName.getBytes());
+  }
 }

RMDIR3Request.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -27,12 +28,25 @@ import org.apache.hadoop.oncrpc.XDR;
 public class RMDIR3Request extends RequestWithHandle {
   private final String name;
 
-  public RMDIR3Request(XDR xdr) throws IOException {
-    super(xdr);
-    name = xdr.readString();
+  public static RMDIR3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    String name = xdr.readString();
+    return new RMDIR3Request(handle, name);
+  }
+
+  public RMDIR3Request(FileHandle handle, String name) {
+    super(handle);
+    this.name = name;
   }
 
   public String getName() {
     return this.name;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    xdr.writeInt(name.getBytes().length);
+    xdr.writeFixedOpaque(name.getBytes());
+  }
 }

RequestWithHandle.java
@@ -17,33 +17,19 @@
  */
 package org.apache.hadoop.nfs.nfs3.request;
 
-import java.io.IOException;
-
 import org.apache.hadoop.nfs.nfs3.FileHandle;
-import org.apache.hadoop.oncrpc.XDR;
 
 /**
  * An NFS request that uses {@link FileHandle} to identify a file.
  */
-public class RequestWithHandle {
+public abstract class RequestWithHandle extends NFS3Request {
   protected final FileHandle handle;
 
   RequestWithHandle(FileHandle handle) {
     this.handle = handle;
   }
 
-  RequestWithHandle(XDR xdr) throws IOException {
-    handle = new FileHandle();
-    if (!handle.deserialize(xdr)) {
-      throw new IOException("can't deserialize file handle");
-    }
-  }
-
   public FileHandle getHandle() {
     return this.handle;
   }
-
-  public void serialize(XDR xdr) {
-    handle.serialize(xdr);
-  }
 }

SETATTR3Request.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
 import org.apache.hadoop.nfs.NfsTime;
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -38,16 +39,26 @@ public class SETATTR3Request extends RequestWithHandle {
   private final boolean check;
   private final NfsTime ctime;
 
-  public SETATTR3Request(XDR xdr) throws IOException {
-    super(xdr);
-    attr = new SetAttr3();
+  public static SETATTR3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    SetAttr3 attr = new SetAttr3();
     attr.deserialize(xdr);
-    check = xdr.readBoolean();
+    boolean check = xdr.readBoolean();
+    NfsTime ctime;
     if (check) {
       ctime = NfsTime.deserialize(xdr);
     } else {
       ctime = null;
     }
+    return new SETATTR3Request(handle, attr, check, ctime);
+  }
+
+  public SETATTR3Request(FileHandle handle, SetAttr3 attr, boolean check,
+      NfsTime ctime) {
+    super(handle);
+    this.attr = attr;
+    this.check = check;
+    this.ctime = ctime;
   }
 
   public SetAttr3 getAttr() {
@@ -61,4 +72,14 @@ public class SETATTR3Request extends RequestWithHandle {
   public NfsTime getCtime() {
     return ctime;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    attr.serialize(xdr);
+    xdr.writeBoolean(check);
+    if (check) {
+      ctime.serialize(xdr);
+    }
+  }
 }

SYMLINK3Request.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 
 import java.io.IOException;
 
+import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;
 
 /**
@@ -29,12 +30,21 @@ public class SYMLINK3Request extends RequestWithHandle {
   private final SetAttr3 symAttr;
   private final String symData; // It contains the target
 
-  public SYMLINK3Request(XDR xdr) throws IOException {
-    super(xdr);
-    name = xdr.readString();
-    symAttr = new SetAttr3();
+  public static SYMLINK3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    String name = xdr.readString();
+    SetAttr3 symAttr = new SetAttr3();
     symAttr.deserialize(xdr);
-    symData = xdr.readString();
+    String symData = xdr.readString();
+    return new SYMLINK3Request(handle, name, symAttr, symData);
+  }
+
+  public SYMLINK3Request(FileHandle handle, String name, SetAttr3 symAttr,
+      String symData) {
+    super(handle);
+    this.name = name;
+    this.symAttr = symAttr;
+    this.symData = symData;
   }
 
   public String getName() {
@@ -48,4 +58,14 @@ public class SYMLINK3Request extends RequestWithHandle {
   public String getSymData() {
     return symData;
   }
+
+  @Override
+  public void serialize(XDR xdr) {
+    handle.serialize(xdr);
+    xdr.writeInt(name.getBytes().length);
+    xdr.writeFixedOpaque(name.getBytes());
+    symAttr.serialize(xdr);
+    xdr.writeInt(symData.getBytes().length);
+    xdr.writeFixedOpaque(symData.getBytes());
+  }
 }

SetAttr3.java
@@ -53,6 +53,15 @@ public class SetAttr3 {
     updateFields = EnumSet.noneOf(SetAttrField.class);
   }
 
+  public SetAttr3(int mode, int uid, int gid, long size, NfsTime atime,
+      NfsTime mtime, EnumSet<SetAttrField> updateFields) {
+    this.mode = mode;
+    this.uid = uid;
+    this.gid = gid;
+    this.size = size;
+    this.updateFields = updateFields;
+  }
+
   public int getMode() {
     return mode;
   }

WRITE3Request.java
@@ -33,12 +33,13 @@ public class WRITE3Request extends RequestWithHandle {
   private final WriteStableHow stableHow;
   private final ByteBuffer data;
 
-  public WRITE3Request(XDR xdr) throws IOException {
-    super(xdr);
-    offset = xdr.readHyper();
-    count = xdr.readInt();
-    stableHow = WriteStableHow.fromValue(xdr.readInt());
-    data = ByteBuffer.wrap(xdr.readFixedOpaque(xdr.readInt()));
+  public static WRITE3Request deserialize(XDR xdr) throws IOException {
+    FileHandle handle = readHandle(xdr);
+    long offset = xdr.readHyper();
+    int count = xdr.readInt();
+    WriteStableHow stableHow = WriteStableHow.fromValue(xdr.readInt());
+    ByteBuffer data = ByteBuffer.wrap(xdr.readFixedOpaque(xdr.readInt()));
+    return new WRITE3Request(handle, offset, count, stableHow, data);
   }
 
   public WRITE3Request(FileHandle handle, final long offset, final int count,

RpcProgramNfs3.java
@@ -268,7 +268,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     GETATTR3Request request = null;
     try {
-      request = new GETATTR3Request(xdr);
+      request = GETATTR3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid GETATTR request");
       response.setStatus(Nfs3Status.NFS3ERR_INVAL);
@@ -360,7 +360,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     SETATTR3Request request = null;
    try {
-      request = new SETATTR3Request(xdr);
+      request = SETATTR3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid SETATTR request");
       response.setStatus(Nfs3Status.NFS3ERR_INVAL);
@@ -445,7 +445,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     LOOKUP3Request request = null;
     try {
-      request = new LOOKUP3Request(xdr);
+      request = LOOKUP3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid LOOKUP request");
       return new LOOKUP3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -513,7 +513,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     ACCESS3Request request = null;
     try {
-      request = new ACCESS3Request(xdr);
+      request = ACCESS3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid ACCESS request");
       return new ACCESS3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -581,7 +581,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
     READLINK3Request request = null;
 
     try {
-      request = new READLINK3Request(xdr);
+      request = READLINK3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid READLINK request");
       return new READLINK3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -655,7 +655,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
     READ3Request request = null;
 
     try {
-      request = new READ3Request(xdr);
+      request = READ3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid READ request");
       return new READ3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -788,7 +788,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
     WRITE3Request request = null;
 
    try {
-      request = new WRITE3Request(xdr);
+      request = WRITE3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid WRITE request");
       return new WRITE3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -870,7 +870,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
     CREATE3Request request = null;
 
     try {
-      request = new CREATE3Request(xdr);
+      request = CREATE3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid CREATE request");
       return new CREATE3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1003,7 +1003,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
     MKDIR3Request request = null;
 
     try {
-      request = new MKDIR3Request(xdr);
+      request = MKDIR3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid MKDIR request");
       return new MKDIR3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1099,7 +1099,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     REMOVE3Request request = null;
     try {
-      request = new REMOVE3Request(xdr);
+      request = REMOVE3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid REMOVE request");
       return new REMOVE3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1179,7 +1179,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     RMDIR3Request request = null;
     try {
-      request = new RMDIR3Request(xdr);
+      request = RMDIR3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid RMDIR request");
       return new RMDIR3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1264,7 +1264,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     RENAME3Request request = null;
     try {
-      request = new RENAME3Request(xdr);
+      request = RENAME3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid RENAME request");
       return new RENAME3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1360,7 +1360,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     SYMLINK3Request request = null;
     try {
-      request = new SYMLINK3Request(xdr);
+      request = SYMLINK3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid SYMLINK request");
       response.setStatus(Nfs3Status.NFS3ERR_INVAL);
@@ -1453,7 +1453,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     READDIR3Request request = null;
     try {
-      request = new READDIR3Request(xdr);
+      request = READDIR3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid READDIR request");
       return new READDIR3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1611,7 +1611,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     READDIRPLUS3Request request = null;
     try {
-      request = new READDIRPLUS3Request(xdr);
+      request = READDIRPLUS3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid READDIRPLUS request");
       return new READDIRPLUS3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1788,7 +1788,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     FSSTAT3Request request = null;
     try {
-      request = new FSSTAT3Request(xdr);
+      request = FSSTAT3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid FSSTAT request");
       return new FSSTAT3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1862,7 +1862,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     FSINFO3Request request = null;
     try {
-      request = new FSINFO3Request(xdr);
+      request = FSINFO3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid FSINFO request");
       return new FSINFO3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1926,7 +1926,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     PATHCONF3Request request = null;
     try {
-      request = new PATHCONF3Request(xdr);
+      request = PATHCONF3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid PATHCONF request");
       return new PATHCONF3Response(Nfs3Status.NFS3ERR_INVAL);
@@ -1977,7 +1977,7 @@ public class RpcProgramNfs3 extends RpcProgram implements Nfs3Interface {
 
     COMMIT3Request request = null;
     try {
-      request = new COMMIT3Request(xdr);
+      request = COMMIT3Request.deserialize(xdr);
     } catch (IOException e) {
       LOG.error("Invalid COMMIT request");
       response.setStatus(Nfs3Status.NFS3ERR_INVAL);
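
In RpcProgramNfs3 above, every handler simply swaps new XXX3Request(xdr) for the static XXX3Request.deserialize(xdr) factory inside the existing try/catch. The test file below uses the opposite direction: it builds a request object, serializes it into an XDR buffer, and passes that buffer to the handler. A condensed sketch of that pattern for GETATTR, assuming it sits in the same org.apache.hadoop.hdfs.nfs.nfs3 package as the test so the handler is reachable (the helper class and its parameters are illustrative, not part of the patch):

package org.apache.hadoop.hdfs.nfs.nfs3;

import java.net.InetSocketAddress;

import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.nfs.nfs3.request.GETATTR3Request;
import org.apache.hadoop.nfs.nfs3.response.GETATTR3Response;
import org.apache.hadoop.oncrpc.XDR;
import org.apache.hadoop.oncrpc.security.SecurityHandler;

// Hypothetical helper mirroring the updated tests: build the request object,
// package it into XDR, and invoke the handler with the read-only wrap.
final class GetattrCallSketch {
  private GetattrCallSketch() {}

  static GETATTR3Response getattr(RpcProgramNfs3 nfsd, long fileId,
      SecurityHandler securityHandler) {
    FileHandle handle = new FileHandle(fileId);
    GETATTR3Request req = new GETATTR3Request(handle);
    XDR xdr = new XDR();
    req.serialize(xdr);
    return nfsd.getattr(xdr.asReadOnlyWrap(), securityHandler,
        new InetSocketAddress("localhost", 1234));
  }
}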
@ -17,12 +17,71 @@
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hdfs.nfs.nfs3;
|
package org.apache.hadoop.hdfs.nfs.nfs3;
|
||||||
|
|
||||||
import static org.junit.Assert.assertTrue;
|
|
||||||
import static org.junit.Assert.assertEquals;
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.net.InetSocketAddress;
|
import java.net.InetSocketAddress;
|
||||||
import java.nio.ByteBuffer;
|
import java.nio.ByteBuffer;
|
||||||
|
import java.util.EnumSet;
|
||||||
|
|
||||||
|
import org.apache.hadoop.fs.CommonConfigurationKeys;
|
||||||
|
import org.apache.hadoop.fs.Path;
|
||||||
|
import org.apache.hadoop.hdfs.DFSTestUtil;
|
||||||
|
import org.apache.hadoop.hdfs.DistributedFileSystem;
|
||||||
|
import org.apache.hadoop.hdfs.MiniDFSCluster;
|
||||||
|
import org.apache.hadoop.hdfs.nfs.conf.NfsConfigKeys;
|
||||||
|
import org.apache.hadoop.hdfs.nfs.conf.NfsConfiguration;
|
||||||
|
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
|
||||||
|
import org.apache.hadoop.hdfs.server.namenode.NameNode;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.FileHandle;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.Nfs3Constant.WriteStableHow;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.ACCESS3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.COMMIT3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.CREATE3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.FSINFO3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.FSSTAT3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.GETATTR3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.LOOKUP3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.MKDIR3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.PATHCONF3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.READ3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.READDIR3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.READDIRPLUS3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.READLINK3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.REMOVE3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.RENAME3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.RMDIR3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.SETATTR3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.SYMLINK3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.SetAttr3;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.SetAttr3.SetAttrField;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.request.WRITE3Request;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.ACCESS3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.COMMIT3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.CREATE3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.FSINFO3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.FSSTAT3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.GETATTR3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.LOOKUP3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.MKDIR3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.PATHCONF3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.READ3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.READDIR3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.READDIRPLUS3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.READLINK3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.REMOVE3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.RENAME3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.RMDIR3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.SETATTR3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response;
|
||||||
|
import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
|
||||||
|
import org.apache.hadoop.oncrpc.XDR;
|
||||||
|
import org.apache.hadoop.oncrpc.security.SecurityHandler;
|
||||||
|
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
|
||||||
|
import org.apache.hadoop.security.authorize.ProxyUsers;
|
||||||
import org.jboss.netty.channel.Channel;
|
import org.jboss.netty.channel.Channel;
|
||||||
import org.junit.AfterClass;
|
import org.junit.AfterClass;
|
||||||
import org.junit.Assert;
|
import org.junit.Assert;
|
||||||
|
@ -31,46 +90,6 @@ import org.junit.BeforeClass;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
import org.mockito.Mockito;
|
import org.mockito.Mockito;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.CommonConfigurationKeys;
|
|
||||||
import org.apache.hadoop.fs.Path;
|
|
||||||
import org.apache.hadoop.hdfs.DFSTestUtil;
|
|
||||||
import org.apache.hadoop.hdfs.DistributedFileSystem;
|
|
||||||
import org.apache.hadoop.hdfs.MiniDFSCluster;
|
|
||||||
import org.apache.hadoop.hdfs.nfs.conf.NfsConfiguration;
|
|
||||||
import org.apache.hadoop.hdfs.nfs.conf.NfsConfigKeys;
|
|
||||||
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
|
|
||||||
import org.apache.hadoop.hdfs.server.namenode.NameNode;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.FileHandle;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.Nfs3Constant.WriteStableHow;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.Nfs3Status;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.request.LOOKUP3Request;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.request.READ3Request;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.request.WRITE3Request;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.ACCESS3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.COMMIT3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.CREATE3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.FSSTAT3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.FSINFO3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.GETATTR3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.LOOKUP3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.PATHCONF3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.READ3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.REMOVE3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.RMDIR3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.RENAME3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.READDIR3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.READDIRPLUS3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.READLINK3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.SETATTR3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.SYMLINK3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.response.WRITE3Response;
|
|
||||||
import org.apache.hadoop.nfs.nfs3.request.SetAttr3;
|
|
||||||
import org.apache.hadoop.oncrpc.XDR;
|
|
||||||
import org.apache.hadoop.oncrpc.security.SecurityHandler;
|
|
||||||
import org.apache.hadoop.security.authorize.DefaultImpersonationProvider;
|
|
||||||
import org.apache.hadoop.security.authorize.ProxyUsers;
|
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Tests for {@link RpcProgramNfs3}
|
* Tests for {@link RpcProgramNfs3}
|
||||||
|
@ -143,7 +162,8 @@ public class TestRpcProgramNfs3 {
|
||||||
long dirId = status.getFileId();
|
long dirId = status.getFileId();
|
||||||
FileHandle handle = new FileHandle(dirId);
|
FileHandle handle = new FileHandle(dirId);
|
||||||
XDR xdr_req = new XDR();
|
XDR xdr_req = new XDR();
|
||||||
handle.serialize(xdr_req);
|
GETATTR3Request req = new GETATTR3Request(handle);
|
||||||
|
req.serialize(xdr_req);
|
||||||
|
|
||||||
// Attempt by an unpriviledged user should fail.
|
// Attempt by an unpriviledged user should fail.
|
||||||
GETATTR3Response response1 = nfsd.getattr(xdr_req.asReadOnlyWrap(),
|
GETATTR3Response response1 = nfsd.getattr(xdr_req.asReadOnlyWrap(),
|
||||||
|
@ -165,13 +185,12 @@ public class TestRpcProgramNfs3 {
|
||||||
long dirId = status.getFileId();
|
long dirId = status.getFileId();
|
||||||
XDR xdr_req = new XDR();
|
XDR xdr_req = new XDR();
|
||||||
FileHandle handle = new FileHandle(dirId);
|
FileHandle handle = new FileHandle(dirId);
|
||||||
handle.serialize(xdr_req);
|
SetAttr3 symAttr = new SetAttr3(0, 1, 0, 0, null, null,
|
||||||
xdr_req.writeString("bar");
|
EnumSet.of(SetAttrField.UID));
|
||||||
SetAttr3 symAttr = new SetAttr3();
|
SETATTR3Request req = new SETATTR3Request(handle, symAttr, false, null);
|
||||||
symAttr.serialize(xdr_req);
|
req.serialize(xdr_req);
|
||||||
xdr_req.writeBoolean(false);
|
|
||||||
|
|
||||||
// Attempt by an unpriviledged user should fail.
|
// Attempt by an unprivileged user should fail.
|
||||||
SETATTR3Response response1 = nfsd.setattr(xdr_req.asReadOnlyWrap(),
|
SETATTR3Response response1 = nfsd.setattr(xdr_req.asReadOnlyWrap(),
|
||||||
securityHandlerUnpriviledged,
|
securityHandlerUnpriviledged,
|
||||||
new InetSocketAddress("localhost", 1234));
|
new InetSocketAddress("localhost", 1234));
|
||||||
|
@ -214,7 +233,8 @@ public class TestRpcProgramNfs3 {
|
||||||
long dirId = status.getFileId();
|
long dirId = status.getFileId();
|
||||||
FileHandle handle = new FileHandle(dirId);
|
FileHandle handle = new FileHandle(dirId);
|
||||||
XDR xdr_req = new XDR();
|
XDR xdr_req = new XDR();
|
||||||
handle.serialize(xdr_req);
|
ACCESS3Request req = new ACCESS3Request(handle);
|
||||||
|
req.serialize(xdr_req);
|
||||||
|
|
||||||
// Attempt by an unpriviledged user should fail.
|
// Attempt by an unpriviledged user should fail.
|
||||||
ACCESS3Response response1 = nfsd.access(xdr_req.asReadOnlyWrap(),
|
ACCESS3Response response1 = nfsd.access(xdr_req.asReadOnlyWrap(),
|
||||||
|
@ -237,11 +257,9 @@ public class TestRpcProgramNfs3 {
|
||||||
long dirId = status.getFileId();
|
long dirId = status.getFileId();
|
||||||
XDR xdr_req = new XDR();
|
XDR xdr_req = new XDR();
|
||||||
FileHandle handle = new FileHandle(dirId);
|
FileHandle handle = new FileHandle(dirId);
|
||||||
handle.serialize(xdr_req);
|
SYMLINK3Request req = new SYMLINK3Request(handle, "fubar", new SetAttr3(),
|
||||||
xdr_req.writeString("fubar");
|
"bar");
|
||||||
SetAttr3 symAttr = new SetAttr3();
|
req.serialize(xdr_req);
|
||||||
symAttr.serialize(xdr_req);
|
|
||||||
xdr_req.writeString("bar");
|
|
||||||
|
|
||||||
SYMLINK3Response response = nfsd.symlink(xdr_req.asReadOnlyWrap(),
|
SYMLINK3Response response = nfsd.symlink(xdr_req.asReadOnlyWrap(),
|
||||||
securityHandler, new InetSocketAddress("localhost", 1234));
|
securityHandler, new InetSocketAddress("localhost", 1234));
|
||||||
|
@ -251,7 +269,8 @@ public class TestRpcProgramNfs3 {
|
||||||
// Now perform readlink operations.
|
// Now perform readlink operations.
|
||||||
FileHandle handle2 = response.getObjFileHandle();
|
FileHandle handle2 = response.getObjFileHandle();
|
||||||
XDR xdr_req2 = new XDR();
|
XDR xdr_req2 = new XDR();
|
||||||
handle2.serialize(xdr_req2);
|
READLINK3Request req2 = new READLINK3Request(handle2);
|
||||||
|
req2.serialize(xdr_req2);
|
||||||
|
|
||||||
// Attempt by an unpriviledged user should fail.
|
// Attempt by an unpriviledged user should fail.
|
||||||
READLINK3Response response1 = nfsd.readlink(xdr_req2.asReadOnlyWrap(),
|
READLINK3Response response1 = nfsd.readlink(xdr_req2.asReadOnlyWrap(),
|
||||||
|
@ -327,11 +346,9 @@ public class TestRpcProgramNfs3 {
|
||||||
long dirId = status.getFileId();
|
long dirId = status.getFileId();
|
||||||
XDR xdr_req = new XDR();
|
XDR xdr_req = new XDR();
|
||||||
FileHandle handle = new FileHandle(dirId);
|
FileHandle handle = new FileHandle(dirId);
|
||||||
handle.serialize(xdr_req);
|
CREATE3Request req = new CREATE3Request(handle, "fubar",
|
||||||
xdr_req.writeString("fubar");
|
Nfs3Constant.CREATE_UNCHECKED, new SetAttr3(), 0);
|
||||||
xdr_req.writeInt(Nfs3Constant.CREATE_UNCHECKED);
|
req.serialize(xdr_req);
|
||||||
SetAttr3 symAttr = new SetAttr3();
|
|
||||||
symAttr.serialize(xdr_req);
|
|
||||||
|
|
||||||
// Attempt by an unpriviledged user should fail.
|
// Attempt by an unpriviledged user should fail.
|
||||||
CREATE3Response response1 = nfsd.create(xdr_req.asReadOnlyWrap(),
|
CREATE3Response response1 = nfsd.create(xdr_req.asReadOnlyWrap(),
|
||||||
|
@@ -348,26 +365,27 @@ public class TestRpcProgramNfs3 {
   }

   @Test(timeout = 60000)
-  public void testMkdir() throws Exception {
+  public void testMkdir() throws Exception {//FixME
     HdfsFileStatus status = nn.getRpcServer().getFileInfo(testdir);
     long dirId = status.getFileId();
     XDR xdr_req = new XDR();
     FileHandle handle = new FileHandle(dirId);
-    handle.serialize(xdr_req);
-    xdr_req.writeString("fubar");
-    SetAttr3 symAttr = new SetAttr3();
-    symAttr.serialize(xdr_req);
-    xdr_req.writeString("bar");
+    MKDIR3Request req = new MKDIR3Request(handle, "fubar1", new SetAttr3());
+    req.serialize(xdr_req);

-    // Attempt to remove by an unpriviledged user should fail.
-    SYMLINK3Response response1 = nfsd.symlink(xdr_req.asReadOnlyWrap(),
+    // Attempt to mkdir by an unprivileged user should fail.
+    MKDIR3Response response1 = nfsd.mkdir(xdr_req.asReadOnlyWrap(),
         securityHandlerUnpriviledged,
         new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES,
         response1.getStatus());

-    // Attempt to remove by a priviledged user should pass.
-    SYMLINK3Response response2 = nfsd.symlink(xdr_req.asReadOnlyWrap(),
+    XDR xdr_req2 = new XDR();
+    MKDIR3Request req2 = new MKDIR3Request(handle, "fubar2", new SetAttr3());
+    req2.serialize(xdr_req2);
+
+    // Attempt to mkdir by a privileged user should pass.
+    MKDIR3Response response2 = nfsd.mkdir(xdr_req2.asReadOnlyWrap(),
         securityHandler, new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK,
         response2.getStatus());
@@ -379,20 +397,18 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     XDR xdr_req = new XDR();
     FileHandle handle = new FileHandle(dirId);
-    handle.serialize(xdr_req);
-    xdr_req.writeString("fubar");
-    SetAttr3 symAttr = new SetAttr3();
-    symAttr.serialize(xdr_req);
-    xdr_req.writeString("bar");
+    SYMLINK3Request req = new SYMLINK3Request(handle, "fubar", new SetAttr3(),
+        "bar");
+    req.serialize(xdr_req);

-    // Attempt by an unpriviledged user should fail.
+    // Attempt by an unprivileged user should fail.
     SYMLINK3Response response1 = nfsd.symlink(xdr_req.asReadOnlyWrap(),
         securityHandlerUnpriviledged,
         new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES,
         response1.getStatus());

-    // Attempt by a priviledged user should pass.
+    // Attempt by a privileged user should pass.
     SYMLINK3Response response2 = nfsd.symlink(xdr_req.asReadOnlyWrap(),
         securityHandler, new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK,
@@ -405,8 +421,8 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     XDR xdr_req = new XDR();
     FileHandle handle = new FileHandle(dirId);
-    handle.serialize(xdr_req);
-    xdr_req.writeString("bar");
+    REMOVE3Request req = new REMOVE3Request(handle, "bar");
+    req.serialize(xdr_req);

     // Attempt by an unpriviledged user should fail.
     REMOVE3Response response1 = nfsd.remove(xdr_req.asReadOnlyWrap(),
@@ -428,17 +444,17 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     XDR xdr_req = new XDR();
     FileHandle handle = new FileHandle(dirId);
-    handle.serialize(xdr_req);
-    xdr_req.writeString("foo");
+    RMDIR3Request req = new RMDIR3Request(handle, "foo");
+    req.serialize(xdr_req);

-    // Attempt by an unpriviledged user should fail.
+    // Attempt by an unprivileged user should fail.
     RMDIR3Response response1 = nfsd.rmdir(xdr_req.asReadOnlyWrap(),
         securityHandlerUnpriviledged,
         new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES,
         response1.getStatus());

-    // Attempt by a priviledged user should pass.
+    // Attempt by a privileged user should pass.
     RMDIR3Response response2 = nfsd.rmdir(xdr_req.asReadOnlyWrap(),
         securityHandler, new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK,
@@ -451,19 +467,17 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     XDR xdr_req = new XDR();
     FileHandle handle = new FileHandle(dirId);
-    handle.serialize(xdr_req);
-    xdr_req.writeString("bar");
-    handle.serialize(xdr_req);
-    xdr_req.writeString("fubar");
+    RENAME3Request req = new RENAME3Request(handle, "bar", handle, "fubar");
+    req.serialize(xdr_req);

-    // Attempt by an unpriviledged user should fail.
+    // Attempt by an unprivileged user should fail.
     RENAME3Response response1 = nfsd.rename(xdr_req.asReadOnlyWrap(),
         securityHandlerUnpriviledged,
         new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES,
         response1.getStatus());

-    // Attempt by a priviledged user should pass.
+    // Attempt by a privileged user should pass.
     RENAME3Response response2 = nfsd.rename(xdr_req.asReadOnlyWrap(),
         securityHandler, new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK,
@@ -476,10 +490,8 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     FileHandle handle = new FileHandle(dirId);
     XDR xdr_req = new XDR();
-    handle.serialize(xdr_req);
-    xdr_req.writeLongAsHyper(0);
-    xdr_req.writeLongAsHyper(0);
-    xdr_req.writeInt(100);
+    READDIR3Request req = new READDIR3Request(handle, 0, 0, 100);
+    req.serialize(xdr_req);

     // Attempt by an unpriviledged user should fail.
     READDIR3Response response1 = nfsd.readdir(xdr_req.asReadOnlyWrap(),
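The READDIR3Request arguments line up with the raw writes removed above: a directory handle, a cookie and a cookie verifier written as hypers, and the entry count written as an int. A hedged sketch of the equivalent packaging, with field names assumed rather than taken from the patch:

  // Hypothetical sketch only; the field names (cookie, cookieVerf, count) are assumed.
  @Override
  public void serialize(XDR xdr) {
    handle.serialize(xdr);              // directory file handle
    xdr.writeLongAsHyper(cookie);       // 0 in the test
    xdr.writeLongAsHyper(cookieVerf);   // 0 in the test
    xdr.writeInt(count);                // 100 in the test
  }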
@@ -501,20 +513,17 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     FileHandle handle = new FileHandle(dirId);
     XDR xdr_req = new XDR();
-    handle.serialize(xdr_req);
-    xdr_req.writeLongAsHyper(0);
-    xdr_req.writeLongAsHyper(0);
-    xdr_req.writeInt(3);
-    xdr_req.writeInt(2);
+    READDIRPLUS3Request req = new READDIRPLUS3Request(handle, 0, 0, 3, 2);
+    req.serialize(xdr_req);

-    // Attempt by an unpriviledged user should fail.
+    // Attempt by an unprivileged user should fail.
     READDIRPLUS3Response response1 = nfsd.readdirplus(xdr_req.asReadOnlyWrap(),
         securityHandlerUnpriviledged,
         new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3ERR_ACCES,
         response1.getStatus());

-    // Attempt by a priviledged user should pass.
+    // Attempt by a privileged user should pass.
     READDIRPLUS3Response response2 = nfsd.readdirplus(xdr_req.asReadOnlyWrap(),
         securityHandler, new InetSocketAddress("localhost", 1234));
     assertEquals("Incorrect return code:", Nfs3Status.NFS3_OK,
@@ -527,7 +536,8 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     FileHandle handle = new FileHandle(dirId);
     XDR xdr_req = new XDR();
-    handle.serialize(xdr_req);
+    FSSTAT3Request req = new FSSTAT3Request(handle);
+    req.serialize(xdr_req);

     // Attempt by an unpriviledged user should fail.
     FSSTAT3Response response1 = nfsd.fsstat(xdr_req.asReadOnlyWrap(),
@@ -549,7 +559,8 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     FileHandle handle = new FileHandle(dirId);
     XDR xdr_req = new XDR();
-    handle.serialize(xdr_req);
+    FSINFO3Request req = new FSINFO3Request(handle);
+    req.serialize(xdr_req);

     // Attempt by an unpriviledged user should fail.
     FSINFO3Response response1 = nfsd.fsinfo(xdr_req.asReadOnlyWrap(),
@@ -571,7 +582,8 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     FileHandle handle = new FileHandle(dirId);
     XDR xdr_req = new XDR();
-    handle.serialize(xdr_req);
+    PATHCONF3Request req = new PATHCONF3Request(handle);
+    req.serialize(xdr_req);

     // Attempt by an unpriviledged user should fail.
     PATHCONF3Response response1 = nfsd.pathconf(xdr_req.asReadOnlyWrap(),
@@ -593,9 +605,8 @@ public class TestRpcProgramNfs3 {
     long dirId = status.getFileId();
     FileHandle handle = new FileHandle(dirId);
     XDR xdr_req = new XDR();
-    handle.serialize(xdr_req);
-    xdr_req.writeLongAsHyper(0);
-    xdr_req.writeInt(5);
+    COMMIT3Request req = new COMMIT3Request(handle, 0, 5);
+    req.serialize(xdr_req);

     Channel ch = Mockito.mock(Channel.class);

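All of the updated tests follow one pattern: build a typed request object, serialize it into an XDR buffer, and hand a read-only wrap of that buffer to the NFS3 program. A self-contained sketch of that round trip for COMMIT3, assuming the serialize/deserialize pair introduced by this change (the class name and file id below are illustrative only):

import java.io.IOException;

import org.apache.hadoop.nfs.nfs3.FileHandle;
import org.apache.hadoop.nfs.nfs3.request.COMMIT3Request;
import org.apache.hadoop.oncrpc.XDR;

public class CommitXdrRoundTrip {
  public static void main(String[] args) throws IOException {
    // Package a COMMIT3 request the way the updated test does.
    FileHandle handle = new FileHandle(1234L);              // illustrative file id
    COMMIT3Request req = new COMMIT3Request(handle, 0, 5);

    XDR xdr = new XDR();
    req.serialize(xdr);                                     // XDR packaging added by this change

    // Decode it again, as the server side would on receipt.
    COMMIT3Request decoded = COMMIT3Request.deserialize(xdr.asReadOnlyWrap());
    System.out.println(decoded.getOffset() + " " + decoded.getCount());   // prints "0 5"
  }
}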
@@ -293,6 +293,8 @@ Release 2.6.0 - UNRELEASED
     HDFS-6908. Incorrect snapshot directory diff generated by snapshot deletion.
     (Juan Yu and jing9 via jing9)

+    HDFS-6892. Add XDR packaging method for each NFS request (brandonli)
+
 Release 2.5.1 - UNRELEASED

   INCOMPATIBLE CHANGES
