diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
index 2b3295ed751..c36631d1b60 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/dev-support/findbugsExcludeFile.xml
@@ -59,4 +59,11 @@
+
+
+
+
+
+
+
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
index e94c07d1a4e..235d5a7033b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocol/DatanodeID.java
@@ -18,6 +18,7 @@
package org.apache.hadoop.hdfs.protocol;
+import com.google.protobuf.ByteString;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -42,7 +43,9 @@ public class DatanodeID implements Comparable {
public static final DatanodeID[] EMPTY_ARRAY = {};
private String ipAddr; // IP address
+ private ByteString ipAddrBytes; // ipAddr ByteString to save on PB serde
private String hostName; // hostname claimed by datanode
+ private ByteString hostNameBytes; // hostName ByteString to save on PB serde
private String peerHostName; // hostname from the actual connection
private int xferPort; // data streaming port
private int infoPort; // info server port
@@ -56,6 +59,8 @@ public class DatanodeID implements Comparable {
* For newly formatted Datanodes it is a UUID.
*/
private final String datanodeUuid;
+ // datanodeUuid ByteString to save on PB serde
+ private final ByteString datanodeUuidBytes;
public DatanodeID(DatanodeID from) {
this(from.getDatanodeUuid(), from);
@@ -64,8 +69,11 @@ public DatanodeID(DatanodeID from) {
@VisibleForTesting
public DatanodeID(String datanodeUuid, DatanodeID from) {
this(from.getIpAddr(),
+ from.getIpAddrBytes(),
from.getHostName(),
+ from.getHostNameBytes(),
datanodeUuid,
+ getByteString(datanodeUuid),
from.getXferPort(),
from.getInfoPort(),
from.getInfoSecurePort(),
@@ -87,22 +95,43 @@ public DatanodeID(String datanodeUuid, DatanodeID from) {
*/
public DatanodeID(String ipAddr, String hostName, String datanodeUuid,
int xferPort, int infoPort, int infoSecurePort, int ipcPort) {
- setIpAndXferPort(ipAddr, xferPort);
+ this(ipAddr, getByteString(ipAddr),
+ hostName, getByteString(hostName),
+ datanodeUuid, getByteString(datanodeUuid),
+ xferPort, infoPort, infoSecurePort, ipcPort);
+ }
+
+ private DatanodeID(String ipAddr, ByteString ipAddrBytes,
+ String hostName, ByteString hostNameBytes,
+ String datanodeUuid, ByteString datanodeUuidBytes,
+ int xferPort, int infoPort, int infoSecurePort, int ipcPort) {
+ setIpAndXferPort(ipAddr, ipAddrBytes, xferPort);
this.hostName = hostName;
+ this.hostNameBytes = hostNameBytes;
this.datanodeUuid = checkDatanodeUuid(datanodeUuid);
+ this.datanodeUuidBytes = datanodeUuidBytes;
this.infoPort = infoPort;
this.infoSecurePort = infoSecurePort;
this.ipcPort = ipcPort;
}
- public void setIpAddr(String ipAddr) {
- //updated during registration, preserve former xferPort
- setIpAndXferPort(ipAddr, xferPort);
+ private static ByteString getByteString(String str) {
+ if (str != null) {
+ return ByteString.copyFromUtf8(str);
+ }
+ return ByteString.EMPTY;
}
- private void setIpAndXferPort(String ipAddr, int xferPort) {
+ public void setIpAddr(String ipAddr) {
+ //updated during registration, preserve former xferPort
+ setIpAndXferPort(ipAddr, getByteString(ipAddr), xferPort);
+ }
+
+ private void setIpAndXferPort(String ipAddr, ByteString ipAddrBytes,
+ int xferPort) {
// build xferAddr string to reduce cost of frequent use
this.ipAddr = ipAddr;
+ this.ipAddrBytes = ipAddrBytes;
this.xferPort = xferPort;
this.xferAddr = ipAddr + ":" + xferPort;
}
@@ -118,6 +147,10 @@ public String getDatanodeUuid() {
return datanodeUuid;
}
+ public ByteString getDatanodeUuidBytes() {
+ return datanodeUuidBytes;
+ }
+
private String checkDatanodeUuid(String uuid) {
if (uuid == null || uuid.isEmpty()) {
return null;
@@ -133,6 +166,10 @@ public String getIpAddr() {
return ipAddr;
}
+ public ByteString getIpAddrBytes() {
+ return ipAddrBytes;
+ }
+
/**
* @return hostname
*/
@@ -140,6 +177,10 @@ public String getHostName() {
return hostName;
}
+ public ByteString getHostNameBytes() {
+ return hostNameBytes;
+ }
+
/**
* @return hostname from the actual connection
*/
@@ -256,7 +297,8 @@ public String toString() {
* Note that this does not update storageID.
*/
public void updateRegInfo(DatanodeID nodeReg) {
- setIpAndXferPort(nodeReg.getIpAddr(), nodeReg.getXferPort());
+ setIpAndXferPort(nodeReg.getIpAddr(), nodeReg.getIpAddrBytes(),
+ nodeReg.getXferPort());
hostName = nodeReg.getHostName();
peerHostName = nodeReg.getPeerHostName();
infoPort = nodeReg.getInfoPort();
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
index 3f27038c2b8..db4a02e9821 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java
@@ -24,8 +24,12 @@
import java.util.Arrays;
import java.util.EnumSet;
import java.util.List;
+import java.util.concurrent.ConcurrentHashMap;
import com.google.common.base.Preconditions;
+import com.google.common.cache.CacheBuilder;
+import com.google.common.cache.CacheLoader;
+import com.google.common.cache.LoadingCache;
import com.google.common.collect.Lists;
import com.google.common.primitives.Shorts;
import com.google.protobuf.ByteString;
@@ -192,6 +196,49 @@ public class PBHelperClient {
private static final FsAction[] FSACTION_VALUES =
FsAction.values();
+ /**
+ * Map used to cache fixed strings to ByteStrings. Since there is no
+ * automatic expiration policy, only use this for strings from a fixed, small
+ * set.
+ *
+   * This map should not be accessed directly. Use the getFixedByteString
+ * methods instead.
+ */
+ private static ConcurrentHashMap