diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
index b885891af73..921af31aa7e 100644
--- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
@@ -451,7 +451,7 @@
-
+
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 3263173687c..8e57d6efcf7 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -251,10 +251,11 @@
re2j
compile
+
com.google.protobuf
protobuf-java
- ${protobuf2.scope}
+ ${common.protobuf2.scope}
com.google.code.gson
@@ -484,11 +485,11 @@
**/ProtobufHelper.java
- **/RpcWritable.java
**/ProtobufRpcEngineCallback.java
**/ProtobufRpcEngine.java
**/ProtobufRpcEngine2.java
**/ProtobufRpcEngineProtos.java
+ **/ProtobufWrapperLegacy.java
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java
index c3cf3bc6e89..21b19fb3257 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java
@@ -37,10 +37,10 @@ import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestPr
import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto;
import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto;
import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToObserverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.ProtocolTranslator;
import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.thirdparty.protobuf.RpcController;
@@ -87,7 +87,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
try {
rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ);
} catch (ServiceException e) {
- throw ProtobufHelper.extractRemoteException(e);
+ throw ShadedProtobufHelper.getRemoteException(e);
}
}
@@ -100,7 +100,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
rpcProxy.transitionToActive(NULL_CONTROLLER, req);
} catch (ServiceException e) {
- throw ProtobufHelper.extractRemoteException(e);
+ throw ShadedProtobufHelper.getRemoteException(e);
}
}
@@ -112,7 +112,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
.setReqInfo(convert(reqInfo)).build();
rpcProxy.transitionToStandby(NULL_CONTROLLER, req);
} catch (ServiceException e) {
- throw ProtobufHelper.extractRemoteException(e);
+ throw ShadedProtobufHelper.getRemoteException(e);
}
}
@@ -125,7 +125,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
.setReqInfo(convert(reqInfo)).build();
rpcProxy.transitionToObserver(NULL_CONTROLLER, req);
} catch (ServiceException e) {
- throw ProtobufHelper.extractRemoteException(e);
+ throw ShadedProtobufHelper.getRemoteException(e);
}
}
@@ -136,7 +136,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
status = rpcProxy.getServiceStatus(NULL_CONTROLLER,
GET_SERVICE_STATUS_REQ);
} catch (ServiceException e) {
- throw ProtobufHelper.extractRemoteException(e);
+ throw ShadedProtobufHelper.getRemoteException(e);
}
HAServiceStatus ret = new HAServiceStatus(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java
index 4a1e3b14767..306997f7d2d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java
@@ -27,10 +27,10 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ha.ZKFCProtocol;
import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto;
import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.ProtocolTranslator;
import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.UserGroupInformation;
@@ -63,7 +63,8 @@ public class ZKFCProtocolClientSideTranslatorPB implements
.build();
rpcProxy.cedeActive(NULL_CONTROLLER, req);
} catch (ServiceException e) {
- throw ProtobufHelper.extractRemoteException(e);
+
+ throw ShadedProtobufHelper.getRemoteException(e);
}
}
@@ -73,7 +74,8 @@ public class ZKFCProtocolClientSideTranslatorPB implements
rpcProxy.gracefulFailover(NULL_CONTROLLER,
GracefulFailoverRequestProto.getDefaultInstance());
} catch (ServiceException e) {
- throw ProtobufHelper.extractRemoteException(e);
+
+ throw ShadedProtobufHelper.getRemoteException(e);
}
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
index 9d7a5d516c4..d2c7f9c116c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
@@ -18,11 +18,10 @@
package org.apache.hadoop.ipc;
import java.io.IOException;
-import java.util.concurrent.ConcurrentHashMap;
import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
@@ -31,9 +30,15 @@ import org.apache.hadoop.thirdparty.protobuf.ByteString;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
/**
- * Helper methods for protobuf related RPC implementation
+ * Helper methods for protobuf related RPC implementation.
 * This is deprecated because it references protobuf 2.5 classes
 * as well as the shaded one — and may need an unshaded protobuf
 * JAR on the classpath during compilation.
 * It should not be used internally; it is retained in case other
 * applications use it.
*/
@InterfaceAudience.Private
+@Deprecated
public final class ProtobufHelper {
private ProtobufHelper() {
@@ -41,22 +46,14 @@ public final class ProtobufHelper {
}
/**
- * Return the IOException thrown by the remote server wrapped in
+ * Return the IOException thrown by the remote server wrapped in
* ServiceException as cause.
- * The signature of this method changes with updates to the hadoop-thirdparty
- * shaded protobuf library.
* @param se ServiceException that wraps IO exception thrown by the server
* @return Exception wrapped in ServiceException or
* a new IOException that wraps the unexpected ServiceException.
*/
- @InterfaceAudience.Private
- @InterfaceStability.Unstable
- public static IOException extractRemoteException(ServiceException se) {
- Throwable e = se.getCause();
- if (e == null) {
- return new IOException(se);
- }
- return e instanceof IOException ? (IOException) e : new IOException(se);
+ public static IOException getRemoteException(ServiceException se) {
+ return ShadedProtobufHelper.getRemoteException(se);
}
/**
@@ -79,29 +76,13 @@ public final class ProtobufHelper {
return e instanceof IOException ? (IOException) e : new IOException(se);
}
- /**
- * Map used to cache fixed strings to ByteStrings. Since there is no
- * automatic expiration policy, only use this for strings from a fixed, small
- * set.
- *
- * This map should not be accessed directly. Used the getFixedByteString
- * methods instead.
- */
- private final static ConcurrentHashMap