HDFS-7516. Fix findbugs warnings in hdfs-nfs project. Contributed by Brandon Li

(cherry picked from commit 42d8858c5d)
Brandon Li 2014-12-15 11:28:56 -08:00
parent d2356a552a
commit 781a1e352c
14 changed files with 40 additions and 27 deletions

MountResponse.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.mount;
 import java.util.List;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsExports;
 import org.apache.hadoop.oncrpc.RpcAcceptedReply;
 import org.apache.hadoop.oncrpc.XDR;

@@ -76,7 +77,7 @@ public class MountResponse {
     if (hostGroups.length > 0) {
       for (int j = 0; j < hostGroups.length; j++) {
         xdr.writeBoolean(true); // Value follows - yes
-        xdr.writeVariableOpaque(hostGroups[j].getBytes());
+        xdr.writeVariableOpaque(hostGroups[j].getBytes(Charsets.UTF_8));
       }
     }
     xdr.writeBoolean(false); // Value follows - no more group
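
The warning these changes silence is findbugs' default-encoding check (presumably the DM_DEFAULT_ENCODING pattern): a bare String.getBytes() or new String(byte[]) uses the JVM's platform default charset, so the bytes put on the wire depend on -Dfile.encoding. A minimal standalone sketch of the difference, using the JDK's StandardCharsets in place of the commons-io Charsets imported by this commit:

    import java.nio.charset.StandardCharsets;

    public class EncodingDemo {
      public static void main(String[] args) {
        String host = "h\u00f6st.example.com";
        // Platform-dependent: 17 bytes under UTF-8, 16 under ISO-8859-1.
        byte[] defaultBytes = host.getBytes();
        // Deterministic regardless of -Dfile.encoding: always 17 bytes.
        byte[] utf8Bytes = host.getBytes(StandardCharsets.UTF_8);
        System.out.println(defaultBytes.length + " vs " + utf8Bytes.length);
      }
    }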

FileHandle.java

@@ -22,6 +22,7 @@ import java.security.MessageDigest;
 import java.security.NoSuchAlgorithmException;
 import java.util.Arrays;
 
+import org.apache.commons.io.Charsets;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.oncrpc.XDR;

@@ -72,10 +73,8 @@ public class FileHandle {
       return;
     }
-    byte[] in = s.getBytes();
-    for (int i = 0; i < in.length; i++) {
-      digest.update(in[i]);
-    }
+    byte[] in = s.getBytes(Charsets.UTF_8);
+    digest.update(in);
     byte[] digestbytes = digest.digest();
     for (int i = 0; i < 16; i++) {
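
Besides pinning the charset, this hunk collapses the byte-at-a-time digest loop into a single array update; MessageDigest.update(byte[]) feeds the same byte stream, so the resulting handle bytes are unchanged. A sketch of the equivalence (MD5 chosen here only for illustration):

    import java.nio.charset.StandardCharsets;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class DigestDemo {
      public static void main(String[] args) throws NoSuchAlgorithmException {
        MessageDigest perByte = MessageDigest.getInstance("MD5");
        MessageDigest wholeArray = MessageDigest.getInstance("MD5");
        byte[] in = "/export/path".getBytes(StandardCharsets.UTF_8);
        for (byte b : in) {
          perByte.update(b);    // old style: one byte per call
        }
        wholeArray.update(in);  // new style: the whole array at once
        // Both digests see the identical byte sequence, so they match.
        System.out.println(
            MessageDigest.isEqual(perByte.digest(), wholeArray.digest()));
      }
    }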

CREATE3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3Constant;
 import org.apache.hadoop.oncrpc.XDR;

@@ -78,7 +79,7 @@ public class CREATE3Request extends RequestWithHandle {
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
     xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(), name.length());
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
     xdr.writeInt(mode);
     objAttr.serialize(xdr);
   }

LINK3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;

@@ -56,6 +57,6 @@ public class LINK3Request extends RequestWithHandle {
     handle.serialize(xdr);
     fromDirHandle.serialize(xdr);
     xdr.writeInt(fromName.length());
-    xdr.writeFixedOpaque(fromName.getBytes(), fromName.length());
+    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8), fromName.length());
   }
 }

LOOKUP3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;

@@ -53,7 +54,7 @@ public class LOOKUP3Request extends RequestWithHandle {
   @VisibleForTesting
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
   }
 }
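
One asymmetry worth noting now that every getBytes() call is explicit: CREATE3Request above (and MKNOD3Request below) write name.length(), the UTF-16 char count, as the opaque length, while LOOKUP3Request and the remaining requests write name.getBytes(Charsets.UTF_8).length, the encoded byte count. The two agree only for ASCII names; the commit keeps each file's existing convention. A quick illustration of where they diverge:

    import java.nio.charset.StandardCharsets;

    public class LengthDemo {
      public static void main(String[] args) {
        String ascii = "file.txt";
        String accented = "r\u00e9sum\u00e9.txt";
        // ASCII: char count equals UTF-8 byte count.
        System.out.println(ascii.length());                                // 8
        System.out.println(ascii.getBytes(StandardCharsets.UTF_8).length); // 8
        // Multi-byte characters: the counts diverge.
        System.out.println(accented.length());                                // 10
        System.out.println(accented.getBytes(StandardCharsets.UTF_8).length); // 12
      }
    }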

MKDIR3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;

@@ -54,8 +55,8 @@ public class MKDIR3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
     objAttr.serialize(xdr);
   }
 }

MKNOD3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.NfsFileType;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.nfs.nfs3.Nfs3FileAttributes.Specdata3;

@@ -79,7 +80,7 @@ public class MKNOD3Request extends RequestWithHandle {
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
     xdr.writeInt(name.length());
-    xdr.writeFixedOpaque(name.getBytes(), name.length());
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8), name.length());
     objAttr.serialize(xdr);
     if (spec != null) {
       xdr.writeInt(spec.getSpecdata1());

REMOVE3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;

@@ -46,7 +47,7 @@ public class REMOVE3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
   }
 }

RENAME3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;

@@ -66,10 +67,10 @@ public class RENAME3Request extends NFS3Request {
   @Override
   public void serialize(XDR xdr) {
     fromDirHandle.serialize(xdr);
-    xdr.writeInt(fromName.getBytes().length);
-    xdr.writeFixedOpaque(fromName.getBytes());
+    xdr.writeInt(fromName.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(fromName.getBytes(Charsets.UTF_8));
     toDirHandle.serialize(xdr);
-    xdr.writeInt(toName.getBytes().length);
-    xdr.writeFixedOpaque(toName.getBytes());
+    xdr.writeInt(toName.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(toName.getBytes(Charsets.UTF_8));
   }
 }

RMDIR3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;

@@ -46,7 +47,7 @@ public class RMDIR3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
   }
 }

SYMLINK3Request.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.nfs.nfs3.request;
 import java.io.IOException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.nfs.nfs3.FileHandle;
 import org.apache.hadoop.oncrpc.XDR;

@@ -62,10 +63,10 @@ public class SYMLINK3Request extends RequestWithHandle {
   @Override
   public void serialize(XDR xdr) {
     handle.serialize(xdr);
-    xdr.writeInt(name.getBytes().length);
-    xdr.writeFixedOpaque(name.getBytes());
+    xdr.writeInt(name.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(name.getBytes(Charsets.UTF_8));
     symAttr.serialize(xdr);
-    xdr.writeInt(symData.getBytes().length);
-    xdr.writeFixedOpaque(symData.getBytes());
+    xdr.writeInt(symData.getBytes(Charsets.UTF_8).length);
+    xdr.writeFixedOpaque(symData.getBytes(Charsets.UTF_8));
   }
 }

XDR.java

@@ -19,6 +19,7 @@ package org.apache.hadoop.oncrpc;
 import java.nio.ByteBuffer;
 
+import org.apache.commons.io.Charsets;
 import org.jboss.netty.buffer.ChannelBuffer;
 import org.jboss.netty.buffer.ChannelBuffers;

@@ -165,11 +166,11 @@ public final class XDR {
   }
 
   public String readString() {
-    return new String(readVariableOpaque());
+    return new String(readVariableOpaque(), Charsets.UTF_8);
   }
 
   public void writeString(String s) {
-    writeVariableOpaque(s.getBytes());
+    writeVariableOpaque(s.getBytes(Charsets.UTF_8));
   }
 
   private void writePadding() {
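
With both directions pinned to UTF-8, writeString and readString now round-trip any Unicode string regardless of the JVM's default encoding on either end of the connection. The same round-trip outside XDR, as a sketch:

    import java.nio.charset.StandardCharsets;

    public class RoundTripDemo {
      public static void main(String[] args) {
        String original = "export-\u00fc";
        byte[] wire = original.getBytes(StandardCharsets.UTF_8);   // writeString side
        String decoded = new String(wire, StandardCharsets.UTF_8); // readString side
        System.out.println(original.equals(decoded));              // true
        // With platform defaults, a writer on a UTF-8 JVM and a reader on a
        // windows-1252 JVM would decode these same bytes to a different string.
      }
    }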

CredentialsSys.java

@@ -20,6 +20,7 @@ package org.apache.hadoop.oncrpc.security;
 import java.net.InetAddress;
 import java.net.UnknownHostException;
 
+import org.apache.commons.io.Charsets;
 import org.apache.hadoop.oncrpc.XDR;
 
 /** Credential used by AUTH_SYS */

@@ -93,7 +94,7 @@ public class CredentialsSys extends Credentials {
   @Override
   public void write(XDR xdr) {
     // mStamp + mHostName.length + mHostName + mUID + mGID + mAuxGIDs.count
-    mCredentialsLength = 20 + mHostName.getBytes().length;
+    mCredentialsLength = 20 + mHostName.getBytes(Charsets.UTF_8).length;
     // mAuxGIDs
     if (mAuxGIDs != null && mAuxGIDs.length > 0) {
       mCredentialsLength += mAuxGIDs.length * 4;
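
The constant 20 here is the fixed portion of the AUTH_SYS credential named in the comment: five 4-byte XDR integers (stamp, hostname length, UID, GID, auxiliary-GID count), with the hostname bytes and 4 bytes per auxiliary GID added on top. A sketch of the same arithmetic with illustrative names (not the class's actual fields):

    import java.nio.charset.StandardCharsets;

    public class AuthSysLengthDemo {
      // stamp + hostname-length + uid + gid + aux-gid count = 5 ints = 20 bytes
      static int credentialsLength(String hostName, int[] auxGids) {
        int length = 20 + hostName.getBytes(StandardCharsets.UTF_8).length;
        if (auxGids != null && auxGids.length > 0) {
          length += auxGids.length * 4;  // one 4-byte int per auxiliary GID
        }
        return length;
      }

      public static void main(String[] args) {
        // 20 fixed + 10 hostname bytes + 2 * 4 = 38
        System.out.println(credentialsLength("nfs-client", new int[] {100, 101}));
      }
    }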

CHANGES.txt

@@ -336,6 +336,8 @@ Release 2.7.0 - UNRELEASED
     HDFS-7506. Consolidate implementation of setting inode attributes into a
     single class. (wheat9)
 
+    HDFS-7516. Fix findbugs warnings in hdfs-nfs project. (brandonli)
+
 Release 2.6.1 - UNRELEASED
 
   INCOMPATIBLE CHANGES