From 5f86dd58bd5cff1ecac355791a40d0dc26bdddc2 Mon Sep 17 00:00:00 2001 From: Steve Loughran Date: Thu, 20 Oct 2022 15:16:21 +0100 Subject: [PATCH] HADOOP-18487. making protobuf 2.5 optional * new org.apache.hadoop.ipc.internal for internal only classes * with a ShadedProtobufHelper in there which has shaded protobuf refs only, so guaranteed not to need protobuf-2.5 on the CP * findbugs, protobuf source patch etc * can spec new export policies for the protobuf jar in hadoop-common and other places it is referenced. hadoop-common back @compile Change-Id: I61a99d2fd673259ab50d000f28a29e8a38aaf1b1 --- .../dev-support/findbugsExcludeFile.xml | 2 +- hadoop-common-project/hadoop-common/pom.xml | 5 +- ...ServiceProtocolClientSideTranslatorPB.java | 12 +- .../ZKFCProtocolClientSideTranslatorPB.java | 8 +- .../org/apache/hadoop/ipc/ProtobufHelper.java | 65 +--- .../hadoop/ipc/ProtobufWrapperLegacy.java | 19 +- .../org/apache/hadoop/ipc/RpcClientUtil.java | 4 +- .../ipc/internal/ShadedProtobufHelper.java | 144 ++++++++ .../hadoop/ipc/internal/package-info.java | 28 ++ ...RefreshProtocolClientSideTranslatorPB.java | 5 +- ...llQueueProtocolClientSideTranslatorPB.java | 5 +- .../apache/hadoop/security/Credentials.java | 6 +- ...nPolicyProtocolClientSideTranslatorPB.java | 5 +- ...appingsProtocolClientSideTranslatorPB.java | 8 +- ...appingsProtocolClientSideTranslatorPB.java | 6 +- ...per.java => TestShadedProtobufHelper.java} | 15 +- .../ClientDatanodeProtocolTranslatorPB.java | 36 +- .../ClientNamenodeProtocolTranslatorPB.java | 332 ++++++++++++------ .../hdfs/protocolPB/PBHelperClient.java | 13 +- .../ReconfigurationProtocolTranslatorPB.java | 11 +- .../RouterAdminProtocolTranslatorPB.java | 42 ++- ...ifelineProtocolClientSideTranslatorPB.java | 5 +- ...atanodeProtocolClientSideTranslatorPB.java | 29 +- ...liasMapProtocolClientSideTranslatorPB.java | 14 +- .../InterDatanodeProtocolTranslatorPB.java | 8 +- .../JournalProtocolTranslatorPB.java | 11 +- 
.../NamenodeProtocolTranslatorPB.java | 44 ++- .../InterQJournalProtocolTranslatorPB.java | 5 +- .../QJournalProtocolTranslatorPB.java | 62 ++-- ...RefreshProtocolClientSideTranslatorPB.java | 14 +- hadoop-project/pom.xml | 7 +- ...gerAdministrationProtocolPBClientImpl.java | 4 +- 32 files changed, 659 insertions(+), 315 deletions(-) create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/ShadedProtobufHelper.java create mode 100644 hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/package-info.java rename hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/{TestProtobufHelper.java => TestShadedProtobufHelper.java} (83%) diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml index b885891af73..921af31aa7e 100644 --- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml +++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml @@ -451,7 +451,7 @@ - + diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 3263173687c..8e57d6efcf7 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -251,10 +251,11 @@ re2j compile + com.google.protobuf protobuf-java - ${protobuf2.scope} + ${common.protobuf2.scope} com.google.code.gson @@ -484,11 +485,11 @@ **/ProtobufHelper.java - **/RpcWritable.java **/ProtobufRpcEngineCallback.java **/ProtobufRpcEngine.java **/ProtobufRpcEngine2.java **/ProtobufRpcEngineProtos.java + **/ProtobufWrapperLegacy.java diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java index 
c3cf3bc6e89..21b19fb3257 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java @@ -37,10 +37,10 @@ import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestPr import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto; import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto; import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToObserverRequestProto; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.thirdparty.protobuf.RpcController; @@ -87,7 +87,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements try { rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -100,7 +100,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements rpcProxy.transitionToActive(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -112,7 +112,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements .setReqInfo(convert(reqInfo)).build(); rpcProxy.transitionToStandby(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -125,7 +125,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements 
.setReqInfo(convert(reqInfo)).build(); rpcProxy.transitionToObserver(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -136,7 +136,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements status = rpcProxy.getServiceStatus(NULL_CONTROLLER, GET_SERVICE_STATUS_REQ); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } HAServiceStatus ret = new HAServiceStatus( diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java index 4a1e3b14767..306997f7d2d 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java @@ -27,10 +27,10 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.ZKFCProtocol; import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto; import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.AccessControlException; import org.apache.hadoop.security.UserGroupInformation; @@ -63,7 +63,8 @@ public class ZKFCProtocolClientSideTranslatorPB implements .build(); rpcProxy.cedeActive(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw 
ShadedProtobufHelper.getRemoteException(e); } } @@ -73,7 +74,8 @@ public class ZKFCProtocolClientSideTranslatorPB implements rpcProxy.gracefulFailover(NULL_CONTROLLER, GracefulFailoverRequestProto.getDefaultInstance()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java index 9d7a5d516c4..d2c7f9c116c 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java @@ -18,11 +18,10 @@ package org.apache.hadoop.ipc; import java.io.IOException; -import java.util.concurrent.ConcurrentHashMap; import org.apache.hadoop.classification.InterfaceAudience; -import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.io.Text; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; @@ -31,9 +30,15 @@ import org.apache.hadoop.thirdparty.protobuf.ByteString; import org.apache.hadoop.thirdparty.protobuf.ServiceException; /** - * Helper methods for protobuf related RPC implementation + * Helper methods for protobuf related RPC implementation. + * This is deprecated because it references protobuf 2.5 classes + * as well as the shaded one -and may need an unshaded protobuf + * JAR on the classpath during compilation. + * It should not be used internally; it is retained in case other + * applications use it. 
*/ @InterfaceAudience.Private +@Deprecated public final class ProtobufHelper { private ProtobufHelper() { @@ -41,22 +46,14 @@ public final class ProtobufHelper { } /** - * Return the IOException thrown by the remote server wrapped in + * Return the IOException thrown by the remote server wrapped in * ServiceException as cause. - * The signature of this method changes with updates to the hadoop-thirdparty - * shaded protobuf library. * @param se ServiceException that wraps IO exception thrown by the server * @return Exception wrapped in ServiceException or * a new IOException that wraps the unexpected ServiceException. */ - @InterfaceAudience.Private - @InterfaceStability.Unstable - public static IOException extractRemoteException(ServiceException se) { - Throwable e = se.getCause(); - if (e == null) { - return new IOException(se); - } - return e instanceof IOException ? (IOException) e : new IOException(se); + public static IOException getRemoteException(ServiceException se) { + return ShadedProtobufHelper.getRemoteException(se); } /** @@ -79,29 +76,13 @@ public final class ProtobufHelper { return e instanceof IOException ? (IOException) e : new IOException(se); } - /** - * Map used to cache fixed strings to ByteStrings. Since there is no - * automatic expiration policy, only use this for strings from a fixed, small - * set. - *

- * This map should not be accessed directly. Used the getFixedByteString - * methods instead. - */ - private final static ConcurrentHashMap - FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>(); - /** * Get the ByteString for frequently used fixed and small set strings. * @param key string * @return the ByteString for frequently used fixed and small set strings. */ public static ByteString getFixedByteString(Text key) { - ByteString value = FIXED_BYTESTRING_CACHE.get(key); - if (value == null) { - value = ByteString.copyFromUtf8(key.toString()); - FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value); - } - return value; + return ShadedProtobufHelper.getFixedByteString(key); } /** @@ -110,34 +91,20 @@ public final class ProtobufHelper { * @return ByteString for frequently used fixed and small set strings. */ public static ByteString getFixedByteString(String key) { - ByteString value = FIXED_BYTESTRING_CACHE.get(key); - if (value == null) { - value = ByteString.copyFromUtf8(key); - FIXED_BYTESTRING_CACHE.put(key, value); - } - return value; + return ShadedProtobufHelper.getFixedByteString(key); } public static ByteString getByteString(byte[] bytes) { // return singleton to reduce object allocation - return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes); + return ShadedProtobufHelper.getByteString(bytes); } public static Token tokenFromProto( TokenProto tokenProto) { - Token token = new Token<>( - tokenProto.getIdentifier().toByteArray(), - tokenProto.getPassword().toByteArray(), new Text(tokenProto.getKind()), - new Text(tokenProto.getService())); - return token; + return ShadedProtobufHelper.tokenFromProto(tokenProto); } public static TokenProto protoFromToken(Token tok) { - TokenProto.Builder builder = TokenProto.newBuilder(). - setIdentifier(getByteString(tok.getIdentifier())). - setPassword(getByteString(tok.getPassword())). - setKindBytes(getFixedByteString(tok.getKind())). 
- setServiceBytes(getFixedByteString(tok.getService())); - return builder.build(); + return ShadedProtobufHelper.protoFromToken(tok); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufWrapperLegacy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufWrapperLegacy.java index e00b1a80055..0f264e0dccc 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufWrapperLegacy.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufWrapperLegacy.java @@ -37,7 +37,7 @@ import org.apache.hadoop.util.Preconditions; * which is a protobuf message unless that condition is met. */ @InterfaceAudience.Private -class ProtobufWrapperLegacy extends RpcWritable { +public class ProtobufWrapperLegacy extends RpcWritable { private com.google.protobuf.Message message; @@ -48,19 +48,20 @@ class ProtobufWrapperLegacy extends RpcWritable { * @param message message to wrap. * @throws IllegalArgumentException if the class is not a protobuf message. */ - ProtobufWrapperLegacy(Object message) { + public ProtobufWrapperLegacy(Object message) { Preconditions.checkArgument(isUnshadedProtobufMessage(message), "message class is not an unshaded protobuf message %s", message.getClass()); this.message = (com.google.protobuf.Message) message; } - com.google.protobuf.Message getMessage() { + public com.google.protobuf.Message getMessage() { return message; } + @Override - void writeTo(ResponseBuffer out) throws IOException { + public void writeTo(ResponseBuffer out) throws IOException { int length = message.getSerializedSize(); length += com.google.protobuf.CodedOutputStream. 
computeUInt32SizeNoTag(length); @@ -70,7 +71,7 @@ class ProtobufWrapperLegacy extends RpcWritable { @SuppressWarnings("unchecked") @Override - T readFrom(ByteBuffer bb) throws IOException { + protected T readFrom(ByteBuffer bb) throws IOException { // using the parser with a byte[]-backed coded input stream is the // most efficient way to deserialize a protobuf. it has a direct // path to the PB ctor that doesn't create multi-layered streams @@ -93,14 +94,15 @@ class ProtobufWrapperLegacy extends RpcWritable { * Has protobuf been looked for and is known as absent? * Saves a check on every message. */ - private static final AtomicBoolean PROTOBUF_KNOWN_NOT_FOUND = new AtomicBoolean(false); + private static final AtomicBoolean PROTOBUF_KNOWN_NOT_FOUND = + new AtomicBoolean(false); /** * Is a message an unshaded protobuf message? * @param payload payload * @return true if protobuf.jar is on the classpath and the payload is a Message */ - static boolean isUnshadedProtobufMessage(Object payload) { + public static boolean isUnshadedProtobufMessage(Object payload) { if (PROTOBUF_KNOWN_NOT_FOUND.get()) { // protobuf is known to be absent. fail fast without examining // jars or generating exceptions. 
@@ -111,7 +113,8 @@ class ProtobufWrapperLegacy extends RpcWritable { // an unshaded protobuf message // this relies on classloader caching for performance try { - Class protobufMessageClazz = Class.forName("com.google.protobuf.Message"); + Class protobufMessageClazz = + Class.forName("com.google.protobuf.Message"); return protobufMessageClazz.isAssignableFrom(payload.getClass()); } catch (ClassNotFoundException e) { PROTOBUF_KNOWN_NOT_FOUND.set(true); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java index 1d1cef7a827..e65fed8f81a 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java @@ -27,6 +27,7 @@ import java.util.TreeMap; import java.util.concurrent.ConcurrentHashMap; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto; import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto; import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto; @@ -126,7 +127,8 @@ public class RpcClientUtil { resp = protocolInfoProxy.getProtocolSignature(NULL_CONTROLLER, builder.build()); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } versionMap = convertProtocolSignatureProtos(resp .getProtocolSignatureList()); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/ShadedProtobufHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/ShadedProtobufHelper.java new file mode 100644 index 00000000000..b73864dbc5c --- /dev/null +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/ShadedProtobufHelper.java @@ -0,0 +1,144 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +package org.apache.hadoop.ipc.internal; + +import java.io.IOException; +import java.util.concurrent.ConcurrentHashMap; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.io.Text; +import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; +import org.apache.hadoop.security.token.Token; +import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.thirdparty.protobuf.ByteString; +import org.apache.hadoop.thirdparty.protobuf.ServiceException; + +/** + * Helper methods for protobuf related RPC implementation using the + * hadoop {@code org.apache.hadoop.thirdparty.protobuf} shaded version. + * This is absolutely private. 
+ */ +@InterfaceAudience.Private +@InterfaceStability.Unstable +public final class ShadedProtobufHelper { + + private ShadedProtobufHelper() { + // Hidden constructor for class with only static helper methods + } + + /** + * Return the IOException thrown by the remote server wrapped in + * ServiceException as cause. + * The signature of this method changes with updates to the hadoop-thirdparty + * shaded protobuf library. + * @param se ServiceException that wraps IO exception thrown by the server + * @return Exception wrapped in ServiceException or + * a new IOException that wraps the unexpected ServiceException. + */ + @InterfaceAudience.Private + @InterfaceStability.Unstable + public static IOException getRemoteException(ServiceException se) { + Throwable e = se.getCause(); + if (e == null) { + return new IOException(se); + } + return e instanceof IOException ? (IOException) e : new IOException(se); + } + + /** + * Map used to cache fixed strings to ByteStrings. Since there is no + * automatic expiration policy, only use this for strings from a fixed, small + * set. + *

+ * This map should not be accessed directly. Use the getFixedByteString + * methods instead. + */ + private static final ConcurrentHashMap + FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>(); + + /** + * Get the ByteString for frequently used fixed and small set strings. + * @param key string + * @return the ByteString for frequently used fixed and small set strings. + */ + public static ByteString getFixedByteString(Text key) { + ByteString value = FIXED_BYTESTRING_CACHE.get(key); + if (value == null) { + value = ByteString.copyFromUtf8(key.toString()); + FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value); + } + return value; + } + + /** + * Get the ByteString for frequently used fixed and small set strings. + * @param key string + * @return ByteString for frequently used fixed and small set strings. + */ + public static ByteString getFixedByteString(String key) { + ByteString value = FIXED_BYTESTRING_CACHE.get(key); + if (value == null) { + value = ByteString.copyFromUtf8(key); + FIXED_BYTESTRING_CACHE.put(key, value); + } + return value; + } + + /** + * Get the byte string of a non-null byte array. + * If the array is 0 bytes long, return a singleton to reduce object allocation. + * @param bytes bytes to convert. + * @return a value + */ + public static ByteString getByteString(byte[] bytes) { + // return singleton to reduce object allocation + return (bytes.length == 0) + ? ByteString.EMPTY + : ByteString.copyFrom(bytes); + } + + public static Token tokenFromProto( + TokenProto tokenProto) { + Token token = new Token<>( + tokenProto.getIdentifier().toByteArray(), + tokenProto.getPassword().toByteArray(), + new Text(tokenProto.getKind()), + new Text(tokenProto.getService())); + return token; + } + + + /** + * Create a {@code TokenProto} instance + * from a hadoop token. + * This builds and caches the fields + * (identifier, password, kind, service) but not + * renewer or any payload. 
+ * @param tok token + * @return a marshallable protobuf class. + */ + public static TokenProto protoFromToken(Token tok) { + TokenProto.Builder builder = TokenProto.newBuilder(). + setIdentifier(getByteString(tok.getIdentifier())). + setPassword(getByteString(tok.getPassword())). + setKindBytes(getFixedByteString(tok.getKind())). + setServiceBytes(getFixedByteString(tok.getService())); + return builder.build(); + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/package-info.java new file mode 100644 index 00000000000..aa190437d08 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/internal/package-info.java @@ -0,0 +1,28 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * IPC internal classes not for any use by libraries outside + * the apache hadoop source tree. 
+ */ +@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "YARN"}) +@InterfaceStability.Unstable +package org.apache.hadoop.ipc.internal; + +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/GenericRefreshProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/GenericRefreshProtocolClientSideTranslatorPB.java index cad2eba0927..f9daddf21ac 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/GenericRefreshProtocolClientSideTranslatorPB.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/GenericRefreshProtocolClientSideTranslatorPB.java @@ -25,12 +25,12 @@ import java.util.Arrays; import java.util.Collection; import java.util.List; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RefreshResponse; import org.apache.hadoop.ipc.RpcClientUtil; import org.apache.hadoop.ipc.GenericRefreshProtocol; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto; import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto; import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto; @@ -68,7 +68,8 @@ public class GenericRefreshProtocolClientSideTranslatorPB implements GenericRefreshResponseCollectionProto resp = rpcProxy.refresh(NULL_CONTROLLER, request); return unpack(resp); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } diff --git 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/RefreshCallQueueProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/RefreshCallQueueProtocolClientSideTranslatorPB.java index b936912f04d..4a0eb6f5348 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/RefreshCallQueueProtocolClientSideTranslatorPB.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protocolPB/RefreshCallQueueProtocolClientSideTranslatorPB.java @@ -21,11 +21,11 @@ package org.apache.hadoop.ipc.protocolPB; import java.io.Closeable; import java.io.IOException; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; import org.apache.hadoop.ipc.RefreshCallQueueProtocol; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.ipc.proto.RefreshCallQueueProtocolProtos.RefreshCallQueueRequestProto; import org.apache.hadoop.thirdparty.protobuf.RpcController; @@ -58,7 +58,8 @@ public class RefreshCallQueueProtocolClientSideTranslatorPB implements rpcProxy.refreshCallQueue(NULL_CONTROLLER, VOID_REFRESH_CALL_QUEUE_REQUEST); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java index ef309cb2247..bbbcc952888 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/Credentials.java @@ -18,6 +18,7 @@ package org.apache.hadoop.security; +import 
org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.thirdparty.protobuf.ByteString; import java.io.BufferedInputStream; @@ -46,7 +47,6 @@ import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.WritableUtils; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto; @@ -382,7 +382,7 @@ public class Credentials implements Writable { CredentialsKVProto.Builder kv = CredentialsKVProto.newBuilder(). setAliasBytes(ByteString.copyFrom( e.getKey().getBytes(), 0, e.getKey().getLength())). - setToken(ProtobufHelper.protoFromToken(e.getValue())); + setToken(ShadedProtobufHelper.protoFromToken(e.getValue())); storage.addTokens(kv.build()); } @@ -404,7 +404,7 @@ public class Credentials implements Writable { CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream)in); for (CredentialsKVProto kv : storage.getTokensList()) { addToken(new Text(kv.getAliasBytes().toByteArray()), - ProtobufHelper.tokenFromProto(kv.getToken())); + ShadedProtobufHelper.tokenFromProto(kv.getToken())); } for (CredentialsKVProto kv : storage.getSecretsList()) { addSecretKey(new Text(kv.getAliasBytes().toByteArray()), diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java index 90c8a5130e7..8189ada023f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshAuthorizationPolicyProtocolClientSideTranslatorPB.java @@ -21,10 +21,10 @@ package org.apache.hadoop.security.protocolPB; import java.io.Closeable; import java.io.IOException; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol; import org.apache.hadoop.security.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto; @@ -58,7 +58,8 @@ public class RefreshAuthorizationPolicyProtocolClientSideTranslatorPB implements rpcProxy.refreshServiceAcl(NULL_CONTROLLER, VOID_REFRESH_SERVICE_ACL_REQUEST); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java index ce6d1b58bb9..de71c041687 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/security/protocolPB/RefreshUserMappingsProtocolClientSideTranslatorPB.java @@ -21,10 +21,10 @@ package org.apache.hadoop.security.protocolPB; import java.io.Closeable; import java.io.IOException; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import 
org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.RefreshUserMappingsProtocol; import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto; import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto; @@ -63,7 +63,8 @@ public class RefreshUserMappingsProtocolClientSideTranslatorPB implements rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER, VOID_REFRESH_USER_TO_GROUPS_MAPPING_REQUEST); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -73,7 +74,8 @@ public class RefreshUserMappingsProtocolClientSideTranslatorPB implements rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER, VOID_REFRESH_SUPERUSER_GROUPS_CONFIGURATION_REQUEST); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java index 34b6403fe09..a55bed22703 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tools/protocolPB/GetUserMappingsProtocolClientSideTranslatorPB.java @@ -20,10 +20,11 @@ package org.apache.hadoop.tools.protocolPB; import java.io.Closeable; import java.io.IOException; -import org.apache.hadoop.ipc.ProtobufHelper; + import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import 
org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.tools.GetUserMappingsProtocol; import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto; import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto; @@ -56,7 +57,8 @@ public class GetUserMappingsProtocolClientSideTranslatorPB implements try { resp = rpcProxy.getGroupsForUser(NULL_CONTROLLER, request); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } return resp.getGroupsList().toArray(new String[resp.getGroupsCount()]); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtobufHelper.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestShadedProtobufHelper.java similarity index 83% rename from hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtobufHelper.java rename to hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestShadedProtobufHelper.java index b7a9a813dc5..73b42e1fdeb 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtobufHelper.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestShadedProtobufHelper.java @@ -22,28 +22,30 @@ import java.io.IOException; import org.junit.Test; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.test.AbstractHadoopTestBase; import org.apache.hadoop.thirdparty.protobuf.ServiceException; -import static org.apache.hadoop.ipc.ProtobufHelper.extractRemoteException; import static org.apache.hadoop.test.LambdaTestUtils.verifyCause; /** - * Test methods in {@link ProtobufHelper}. + * Test methods in {@link ShadedProtobufHelper}. 
*/ -public class TestProtobufHelper extends AbstractHadoopTestBase { +public class TestShadedProtobufHelper extends AbstractHadoopTestBase { @Test public void testExtractRemoteExceptionNoCause() throws Throwable { ServiceException source = new ServiceException("empty"); - IOException ex = extractRemoteException(source); + + IOException ex = ShadedProtobufHelper.getRemoteException(source); verifyCause(ServiceException.class, ex); } @Test public void testExtractRemoteExceptionIOECause() throws Throwable { IOException source = new IOException("ioe"); - IOException ex = extractRemoteException( + + IOException ex = ShadedProtobufHelper.getRemoteException( new ServiceException(source)); // if not the same, throw if (!(ex == source)) { @@ -54,7 +56,8 @@ public class TestProtobufHelper extends AbstractHadoopTestBase { @Test public void testExtractRemoteExceptionOtherCause() throws Throwable { NullPointerException source = new NullPointerException("npe"); - IOException ex = extractRemoteException( + + IOException ex = ShadedProtobufHelper.getRemoteException( new ServiceException(source)); // if not the same, throw ServiceException c1 = verifyCause(ServiceException.class, ex); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java index 7039f100284..f973f5edbbb 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java @@ -67,12 +67,12 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.DiskBa import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import 
org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus; import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.Token; @@ -200,7 +200,7 @@ public class ClientDatanodeProtocolTranslatorPB implements try { return rpcProxy.getReplicaVisibleLength(NULL_CONTROLLER, req).getLength(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -209,7 +209,7 @@ public class ClientDatanodeProtocolTranslatorPB implements try { rpcProxy.refreshNamenodes(NULL_CONTROLLER, VOID_REFRESH_NAMENODES); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -220,7 +220,7 @@ public class ClientDatanodeProtocolTranslatorPB implements try { rpcProxy.deleteBlockPool(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -235,7 +235,7 @@ public class ClientDatanodeProtocolTranslatorPB implements try { resp = rpcProxy.getBlockLocalPathInfo(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } return new BlockLocalPathInfo(PBHelperClient.convert(resp.getBlock()), resp.getLocalPath(), resp.getLocalMetaPath()); @@ -260,7 +260,7 @@ public class ClientDatanodeProtocolTranslatorPB implements try { 
rpcProxy.shutdownDatanode(NULL_CONTROLLER, request); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -269,7 +269,7 @@ public class ClientDatanodeProtocolTranslatorPB implements try { rpcProxy.evictWriters(NULL_CONTROLLER, VOID_EVICT_WRITERS); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -281,7 +281,7 @@ public class ClientDatanodeProtocolTranslatorPB implements VOID_GET_DATANODE_INFO); return PBHelperClient.convert(response.getLocalInfo()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -290,7 +290,7 @@ public class ClientDatanodeProtocolTranslatorPB implements try { rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -304,7 +304,7 @@ public class ClientDatanodeProtocolTranslatorPB implements NULL_CONTROLLER, VOID_GET_RECONFIG_STATUS)); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -316,7 +316,7 @@ public class ClientDatanodeProtocolTranslatorPB implements VOID_LIST_RECONFIGURABLE_PROPERTIES); return response.getNameList(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -331,7 +331,7 @@ public class ClientDatanodeProtocolTranslatorPB implements } rpcProxy.triggerBlockReport(NULL_CONTROLLER, builder.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -343,7 +343,7 @@ public class ClientDatanodeProtocolTranslatorPB implements VOID_GET_BALANCER_BANDWIDTH); 
return response.getBandwidth(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -374,7 +374,7 @@ public class ClientDatanodeProtocolTranslatorPB implements .build(); rpcProxy.submitDiskBalancerPlan(NULL_CONTROLLER, request); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -392,7 +392,7 @@ public class ClientDatanodeProtocolTranslatorPB implements .setPlanID(planID).build(); rpcProxy.cancelDiskBalancerPlan(NULL_CONTROLLER, request); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -417,7 +417,7 @@ public class ClientDatanodeProtocolTranslatorPB implements response.hasPlanFile() ? response.getPlanFile() : null, response.hasCurrentStatus() ? response.getCurrentStatus() : null); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -430,7 +430,7 @@ public class ClientDatanodeProtocolTranslatorPB implements rpcProxy.getDiskBalancerSetting(NULL_CONTROLLER, request); return response.hasValue() ? 
response.getValue() : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -450,7 +450,7 @@ public class ClientDatanodeProtocolTranslatorPB implements } return volumeInfoList; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java index 7855bb20891..252f817ef6d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java @@ -238,13 +238,13 @@ import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.retry.AsyncCallHandler; import org.apache.hadoop.ipc.Client; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto; import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto; import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenResponseProto; @@ -338,7 +338,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return resp.hasLocations() ? 
PBHelperClient.convert(resp.getLocations()) : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -349,7 +350,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient .convert(rpcProxy.getServerDefaults(null, req).getServerDefaults()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -385,7 +387,8 @@ public class ClientNamenodeProtocolTranslatorPB implements CreateResponseProto res = rpcProxy.create(null, req); return res.hasFs() ? PBHelperClient.convert(res.getFs()) : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -401,7 +404,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.truncate(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -420,7 +424,8 @@ public class ClientNamenodeProtocolTranslatorPB implements PBHelperClient.convert(res.getStat()) : null; return new LastBlockWithStatus(lastBlock, stat); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -434,7 +439,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.setReplication(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -453,7 +459,8 @@ public class ClientNamenodeProtocolTranslatorPB implements rpcProxy.setPermission(null, req); } } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -493,7 +500,8 @@ public class 
ClientNamenodeProtocolTranslatorPB implements rpcProxy.setOwner(null, req.build()); } } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -506,7 +514,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.abandonBlock(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -532,7 +541,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convertLocatedBlockProto( rpcProxy.addBlock(null, req.build()).getBlock()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -556,7 +566,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convertLocatedBlockProto( rpcProxy.getAdditionalDatanode(null, req).getBlock()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -572,7 +583,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.complete(null, req.build()).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -585,7 +597,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.reportBadBlocks(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -598,7 +611,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.rename(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -632,7 +646,8 @@ public class 
ClientNamenodeProtocolTranslatorPB implements rpcProxy.rename2(null, req); } } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -645,7 +660,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.concat(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -657,7 +673,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.delete(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -676,7 +693,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.mkdirs(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -695,7 +713,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -738,7 +757,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -754,7 +774,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.renewLease(null, builder.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -767,7 +788,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.recoverLease(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw 
ShadedProtobufHelper.getRemoteException(e); } } @@ -777,7 +799,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert(rpcProxy.getFsStats(null, VOID_GET_FSSTATUS_REQUEST)); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -787,7 +810,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert(rpcProxy.getFsReplicatedBlockStats(null, VOID_GET_FS_REPLICATED_BLOCK_STATS_REQUEST)); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -797,7 +821,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert(rpcProxy.getFsECBlockGroupStats(null, VOID_GET_FS_ECBLOCKGROUP_STATS_REQUEST)); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -811,7 +836,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert( rpcProxy.getDatanodeReport(null, req).getDiList()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -826,7 +852,8 @@ public class ClientNamenodeProtocolTranslatorPB implements rpcProxy.getDatanodeStorageReport(null, req) .getDatanodeStorageReportsList()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -839,7 +866,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.getPreferredBlockSize(null, req).getBsize(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -852,7 +880,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return 
rpcProxy.setSafeMode(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -863,7 +892,8 @@ public class ClientNamenodeProtocolTranslatorPB implements .setTimeWindow(timeWindow).setTxGap(txGap).build(); return rpcProxy.saveNamespace(null, req).getSaved(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -874,7 +904,8 @@ public class ClientNamenodeProtocolTranslatorPB implements VOID_ROLLEDITS_REQUEST); return resp.getNewSegmentTxId(); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -886,7 +917,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.restoreFailedStorage(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -895,7 +927,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.refreshNodes(null, VOID_REFRESH_NODES_REQUEST); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -904,7 +937,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.finalizeUpgrade(null, VOID_FINALIZE_UPGRADE_REQUEST); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -915,7 +949,8 @@ public class ClientNamenodeProtocolTranslatorPB implements null, VOID_UPGRADE_STATUS_REQUEST); return proto.getUpgradeFinalized(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -932,7 +967,8 @@ public class ClientNamenodeProtocolTranslatorPB 
implements } return null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -947,7 +983,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert( rpcProxy.listCorruptFileBlocks(null, req.build()).getCorrupt()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -958,7 +995,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.metaSave(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -972,7 +1010,8 @@ public class ClientNamenodeProtocolTranslatorPB implements GetFileInfoResponseProto res = rpcProxy.getFileInfo(null, req); return res.hasFs() ? PBHelperClient.convert(res.getFs()) : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -991,7 +1030,8 @@ public class ClientNamenodeProtocolTranslatorPB implements ? PBHelperClient.convert(res.getFs()) : null); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1003,7 +1043,8 @@ public class ClientNamenodeProtocolTranslatorPB implements GetFileLinkInfoResponseProto result = rpcProxy.getFileLinkInfo(null, req); return result.hasFs() ? 
PBHelperClient.convert(result.getFs()) : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1017,7 +1058,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert(rpcProxy.getContentSummary(null, req) .getSummary()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1036,7 +1078,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.setQuota(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1049,7 +1092,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.fsync(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1063,7 +1107,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.setTimes(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1079,7 +1124,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.createSymlink(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1091,7 +1137,8 @@ public class ClientNamenodeProtocolTranslatorPB implements GetLinkTargetResponseProto rsp = rpcProxy.getLinkTarget(null, req); return rsp.hasTargetPath() ? 
rsp.getTargetPath() : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1107,7 +1154,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convertLocatedBlockProto( rpcProxy.updateBlockForPipeline(null, req).getBlock()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1125,7 +1173,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.updatePipeline(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1142,7 +1191,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return resp.hasToken() ? PBHelperClient.convertDelegationToken(resp.getToken()) : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1156,7 +1206,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.renewDelegationToken(null, req).getNewExpiryTime(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1170,7 +1221,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.cancelDelegationToken(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1183,7 +1235,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.setBalancerBandwidth(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1202,7 +1255,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return 
rsp.hasDataEncryptionKey() ? PBHelperClient.convert(rsp.getDataEncryptionKey()) : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1214,7 +1268,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.isFileClosed(null, req).getResult(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1235,7 +1290,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.createSnapshot(null, req).getSnapshotPath(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1247,7 +1303,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.deleteSnapshot(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1258,7 +1315,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.allowSnapshot(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1269,7 +1327,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.disallowSnapshot(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1282,7 +1341,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.renameSnapshot(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1300,7 +1360,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return null; } catch (ServiceException e) { - throw 
ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1319,7 +1380,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1335,7 +1397,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert(result.getDiffReport()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1356,7 +1419,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert(result.getDiffReport()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1372,7 +1436,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return rpcProxy.addCacheDirective(null, builder.build()).getId(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1388,7 +1453,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } rpcProxy.modifyCacheDirective(null, builder.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1400,7 +1466,8 @@ public class ClientNamenodeProtocolTranslatorPB implements RemoveCacheDirectiveRequestProto.newBuilder(). setId(id).build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1443,7 +1510,8 @@ public class ClientNamenodeProtocolTranslatorPB implements setFilter(PBHelperClient.convert(filter)). 
build())); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1455,7 +1523,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.addCachePool(null, builder.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1467,7 +1536,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.modifyCachePool(null, builder.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1478,7 +1548,8 @@ public class ClientNamenodeProtocolTranslatorPB implements RemoveCachePoolRequestProto.newBuilder(). setPoolName(cachePoolName).build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1516,7 +1587,8 @@ public class ClientNamenodeProtocolTranslatorPB implements ListCachePoolsRequestProto.newBuilder(). 
setPrevPoolName(prevKey).build())); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1529,7 +1601,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.modifyAclEntries(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1542,7 +1615,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.removeAclEntries(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1553,7 +1627,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.removeDefaultAcl(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1564,7 +1639,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.removeAcl(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1582,7 +1658,8 @@ public class ClientNamenodeProtocolTranslatorPB implements rpcProxy.setAcl(null, req); } } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1614,7 +1691,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert(rpcProxy.getAclStatus(null, req)); } } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1631,7 +1709,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.createEncryptionZone(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw 
ShadedProtobufHelper.getRemoteException(e); } } @@ -1650,7 +1729,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return null; } } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1671,7 +1751,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return new BatchedListEntries<>(elements, response.getHasMore()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1688,7 +1769,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.setErasureCodingPolicy(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1701,7 +1783,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.unsetErasureCodingPolicy(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1718,7 +1801,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient .convertECTopologyVerifierResultProto(response.getResponse()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1732,7 +1816,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.reencryptEncryptionZone(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1751,7 +1836,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return new BatchedListEntries<>(elements, response.getHasMore()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1766,7 +1852,8 @@ 
public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.setXAttr(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1782,7 +1869,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return PBHelperClient.convert(rpcProxy.getXAttrs(null, req)); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1795,7 +1883,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return PBHelperClient.convert(rpcProxy.listXAttrs(null, req)); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1807,7 +1896,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.removeXAttr(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1818,7 +1908,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.checkAccess(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1830,7 +1921,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.setStoragePolicy(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1841,7 +1933,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.unsetStoragePolicy(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1853,7 +1946,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return 
PBHelperClient.convert(rpcProxy.getStoragePolicy(null, request) .getStoragePolicy()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1864,7 +1958,8 @@ public class ClientNamenodeProtocolTranslatorPB implements .getStoragePolicies(null, VOID_GET_STORAGE_POLICIES_REQUEST); return PBHelperClient.convertStoragePolicies(response.getPoliciesList()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1874,7 +1969,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return rpcProxy.getCurrentEditLogTxid(null, req).getTxid(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1885,7 +1981,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { return PBHelperClient.convert(rpcProxy.getEditsFromTxid(null, req)); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1907,7 +2004,8 @@ public class ClientNamenodeProtocolTranslatorPB implements .toArray(AddErasureCodingPolicyResponse[]::new); return responses; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1921,7 +2019,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.removeErasureCodingPolicy(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1935,7 +2034,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.enableErasureCodingPolicy(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ 
-1949,7 +2049,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.disableErasureCodingPolicy(null, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1968,7 +2069,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return ecPolicies; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -1983,7 +2085,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return ecCodecs; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -2001,7 +2104,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -2013,7 +2117,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert(rpcProxy.getQuotaUsage(null, req) .getUsage()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -2045,7 +2150,8 @@ public class ClientNamenodeProtocolTranslatorPB implements } return new BatchedListEntries<>(openFileEntries, response.getHasMore()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -2055,7 +2161,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.msync(null, req.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -2066,7 +2173,8 @@ public class ClientNamenodeProtocolTranslatorPB implements try { rpcProxy.satisfyStoragePolicy(null, req); } catch (ServiceException e) { 
- throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -2078,7 +2186,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return PBHelperClient.convert( rpcProxy.getSlowDatanodeReport(null, req).getDatanodeInfoProtoList()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -2102,7 +2211,8 @@ public class ClientNamenodeProtocolTranslatorPB implements return HAServiceProtocol.HAServiceState.INITIALIZING; } } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java index 496a5cf4614..26ee5de2886 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelperClient.java @@ -209,7 +209,7 @@ import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.erasurecode.ECSchema; -import org.apache.hadoop.ipc.ProtobufHelper; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.proto.SecurityProtos.TokenProto; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.util.ChunkedArrayList; @@ -237,7 +237,7 @@ public class PBHelperClient { FsAction.values(); private static ByteString getFixedByteString(String key) { - return ProtobufHelper.getFixedByteString(key); + return ShadedProtobufHelper.getFixedByteString(key); } /** @@ -260,7 +260,8 @@ public class PBHelperClient { public static ByteString 
getByteString(byte[] bytes) { // return singleton to reduce object allocation - return ProtobufHelper.getByteString(bytes); + // return singleton to reduce object allocation + return ShadedProtobufHelper.getByteString(bytes); } public static ShmId convert(ShortCircuitShmIdProto shmId) { @@ -328,7 +329,7 @@ public class PBHelperClient { } public static TokenProto convert(Token tok) { - return ProtobufHelper.protoFromToken(tok); + return ShadedProtobufHelper.protoFromToken(tok); } public static ShortCircuitShmIdProto convert(ShmId shmId) { @@ -814,8 +815,8 @@ public class PBHelperClient { public static Token convert( TokenProto blockToken) { - return (Token) ProtobufHelper - .tokenFromProto(blockToken); + return (Token) ShadedProtobufHelper.tokenFromProto( + blockToken); } // DatanodeId diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java index 126befa01ca..9272c4f7395 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java @@ -33,12 +33,12 @@ import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.GetRe import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesRequestProto; import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesResponseProto; import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.StartReconfigurationRequestProto; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import 
org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.UserGroupInformation; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -105,7 +105,8 @@ public class ReconfigurationProtocolTranslatorPB implements try { rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -119,7 +120,8 @@ public class ReconfigurationProtocolTranslatorPB implements NULL_CONTROLLER, VOID_GET_RECONFIG_STATUS)); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -131,7 +133,8 @@ public class ReconfigurationProtocolTranslatorPB implements VOID_LIST_RECONFIGURABLE_PROPERTIES); return response.getNameList(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/protocolPB/RouterAdminProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/protocolPB/RouterAdminProtocolTranslatorPB.java index ddd5f29d9d2..3415f246eb4 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/protocolPB/RouterAdminProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/protocolPB/RouterAdminProtocolTranslatorPB.java @@ -97,12 +97,11 @@ import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.RemoveMou import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.RemoveMountTableEntryResponsePBImpl; import 
org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.UpdateMountTableEntryRequestPBImpl; import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.UpdateMountTableEntryResponsePBImpl; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; - +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.thirdparty.protobuf.ServiceException; /** @@ -150,7 +149,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.addMountTableEntry(null, proto); return new AddMountTableEntryResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -165,7 +165,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.updateMountTableEntry(null, proto); return new UpdateMountTableEntryResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -180,7 +181,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.removeMountTableEntry(null, proto); return new RemoveMountTableEntryResponsePBImpl(responseProto); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -195,7 +197,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.getMountTableEntries(null, proto); return new GetMountTableEntriesResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new 
IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -209,7 +212,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.enterSafeMode(null, proto); return new EnterSafeModeResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -223,7 +227,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.leaveSafeMode(null, proto); return new LeaveSafeModeResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -237,7 +242,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.getSafeMode(null, proto); return new GetSafeModeResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -252,7 +258,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.disableNameservice(null, proto); return new DisableNameserviceResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -267,7 +274,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.enableNameservice(null, proto); return new EnableNameserviceResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -281,7 +289,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.getDisabledNameservices(null, proto); return new 
GetDisabledNameservicesResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -296,7 +305,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.refreshMountTableEntries(null, proto); return new RefreshMountTableEntriesResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -311,7 +321,8 @@ public class RouterAdminProtocolTranslatorPB rpcProxy.getDestination(null, proto); return new GetDestinationResponsePBImpl(response); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } @@ -325,7 +336,8 @@ public class RouterAdminProtocolTranslatorPB return new RefreshSuperUserGroupsConfigurationResponsePBImpl(response) .getStatus(); } catch (ServiceException e) { - throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage()); + + throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage()); } } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java index ac850cbe9dd..53d895e13c6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java @@ -29,11 +29,11 @@ import 
org.apache.hadoop.hdfs.server.protocol.DatanodeLifelineProtocol; import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.hdfs.server.protocol.StorageReport; import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; @@ -99,7 +99,8 @@ public class DatanodeLifelineProtocolClientSideTranslatorPB implements try { rpcProxy.sendLifeline(NULL_CONTROLLER, builder.build()); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java index d62729b963e..85b440a6f81 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java @@ -61,11 +61,11 @@ import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport; import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks; import org.apache.hadoop.hdfs.server.protocol.StorageReport; import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import 
org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; @@ -126,7 +126,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements try { resp = rpcProxy.registerDatanode(NULL_CONTROLLER, builder.build()); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } return PBHelper.convert(resp.getRegistration()); } @@ -167,7 +168,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements try { resp = rpcProxy.sendHeartbeat(NULL_CONTROLLER, builder.build()); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } DatanodeCommand[] cmds = new DatanodeCommand[resp.getCmdsList().size()]; int index = 0; @@ -218,7 +220,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements try { resp = rpcProxy.blockReport(NULL_CONTROLLER, builder.build()); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } return resp.hasCmd() ? 
PBHelper.convert(resp.getCmd()) : null; } @@ -238,7 +241,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements try { resp = rpcProxy.cacheReport(NULL_CONTROLLER, builder.build()); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } if (resp.hasCmd()) { return PBHelper.convert(resp.getCmd()); @@ -267,7 +271,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements try { rpcProxy.blockReceivedAndDeleted(NULL_CONTROLLER, builder.build()); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -280,7 +285,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements try { rpcProxy.errorReport(NULL_CONTROLLER, req); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -290,7 +296,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER, VOID_VERSION_REQUEST).getInfo()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -305,7 +312,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements try { rpcProxy.reportBadBlocks(NULL_CONTROLLER, req); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -330,7 +338,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements try { rpcProxy.commitBlockSynchronization(NULL_CONTROLLER, req); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java index 289845f77d3..4b92cfe1a4b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InMemoryAliasMapProtocolClientSideTranslatorPB.java @@ -16,6 +16,7 @@ */ package org.apache.hadoop.hdfs.protocolPB; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; @@ -30,7 +31,6 @@ import org.apache.hadoop.hdfs.server.aliasmap.InMemoryAliasMapProtocol; import org.apache.hadoop.hdfs.server.common.FileRegion; import org.apache.hadoop.hdfs.server.namenode.ha.AbstractNNFailoverProxyProvider; import org.apache.hadoop.hdfs.server.namenode.ha.InMemoryAliasMapFailoverProxyProvider; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.slf4j.Logger; @@ -158,7 +158,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB } } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -186,7 +187,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB return Optional.empty(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -209,7 +211,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB try { rpcProxy.write(null, request); } catch (ServiceException e) { - throw 
ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -220,7 +223,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB BlockPoolRequestProto.newBuilder().build()); return response.getBlockPoolId(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java index 403fb53b543..fb2a09e07a1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java @@ -34,11 +34,11 @@ import org.apache.hadoop.hdfs.protocol.proto.InterDatanodeProtocolProtos.UpdateR import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock; import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.thirdparty.protobuf.RpcController; @@ -82,7 +82,8 @@ public class InterDatanodeProtocolTranslatorPB implements try { resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } if (!resp.getReplicaFound()) { // No replica found on the 
remote node. @@ -112,7 +113,8 @@ public class InterDatanodeProtocolTranslatorPB implements return rpcProxy.updateReplicaUnderRecovery(NULL_CONTROLLER, req ).getStorageUuid(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java index 91be64f9492..65dbf42ea6b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/JournalProtocolTranslatorPB.java @@ -29,11 +29,11 @@ import org.apache.hadoop.hdfs.protocol.proto.JournalProtocolProtos.StartLogSegme import org.apache.hadoop.hdfs.server.protocol.FenceResponse; import org.apache.hadoop.hdfs.server.protocol.JournalInfo; import org.apache.hadoop.hdfs.server.protocol.JournalProtocol; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.thirdparty.protobuf.RpcController; import org.apache.hadoop.thirdparty.protobuf.ServiceException; @@ -72,7 +72,8 @@ public class JournalProtocolTranslatorPB implements ProtocolMetaInterface, try { rpcProxy.journal(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -87,7 +88,8 @@ public class JournalProtocolTranslatorPB implements ProtocolMetaInterface, try { rpcProxy.startLogSegment(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw 
ShadedProtobufHelper.getRemoteException(e); } } @@ -101,7 +103,8 @@ public class JournalProtocolTranslatorPB implements ProtocolMetaInterface, return new FenceResponse(resp.getPreviousEpoch(), resp.getLastTransactionId(), resp.getInSync()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java index f185b195e0b..14392acc4b3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/NamenodeProtocolTranslatorPB.java @@ -51,12 +51,12 @@ import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol; import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.thirdparty.protobuf.RpcController; import org.apache.hadoop.thirdparty.protobuf.ServiceException; @@ -111,7 +111,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, return PBHelper.convert(rpcProxy.getBlocks(NULL_CONTROLLER, req) .getBlocks()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -122,7 +123,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, 
VOID_GET_BLOCKKEYS_REQUEST); return rsp.hasKeys() ? PBHelper.convert(rsp.getKeys()) : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -132,7 +134,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, return rpcProxy.getTransactionId(NULL_CONTROLLER, VOID_GET_TRANSACTIONID_REQUEST).getTxId(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -142,7 +145,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, return rpcProxy.getMostRecentCheckpointTxId(NULL_CONTROLLER, GetMostRecentCheckpointTxIdRequestProto.getDefaultInstance()).getTxId(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -152,7 +156,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, return PBHelper.convert(rpcProxy.rollEditLog(NULL_CONTROLLER, VOID_ROLL_EDITLOG_REQUEST).getSignature()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -162,7 +167,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER, VOID_VERSION_REQUEST).getInfo()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -175,7 +181,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, try { rpcProxy.errorReport(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -189,7 +196,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, 
rpcProxy.registerSubordinateNamenode(NULL_CONTROLLER, req) .getRegistration()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -202,7 +210,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, try { cmd = rpcProxy.startCheckpoint(NULL_CONTROLLER, req).getCommand(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } return PBHelper.convert(cmd); } @@ -216,7 +225,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, try { rpcProxy.endCheckpoint(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -229,7 +239,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, return PBHelper.convert(rpcProxy.getEditLogManifest(NULL_CONTROLLER, req) .getManifest()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -249,7 +260,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, NULL_CONTROLLER, req); return response.getIsUpgradeFinalized(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -262,7 +274,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, NULL_CONTROLLER, req); return response.getIsRollingUpgrade(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -275,7 +288,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol, rpcProxy.getNextSPSPath(NULL_CONTROLLER, req); return nextSPSPath.hasSpsPath() ? 
nextSPSPath.getSpsPath() : null; } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/InterQJournalProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/InterQJournalProtocolTranslatorPB.java index f84604cf5ae..c03e02b4829 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/InterQJournalProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/InterQJournalProtocolTranslatorPB.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs.qjournal.protocolPB; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.thirdparty.protobuf.RpcController; import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.classification.InterfaceAudience; @@ -27,7 +28,6 @@ import org.apache.hadoop.hdfs.qjournal.protocol.InterQJournalProtocol; import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestRequestProto; import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestResponseProto; import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; @@ -76,7 +76,8 @@ public class InterQJournalProtocolTranslatorPB implements ProtocolMetaInterface, req.build() ); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java index 0a7fa268894..e0913245e47 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/protocolPB/QJournalProtocolTranslatorPB.java @@ -63,11 +63,11 @@ import org.apache.hadoop.hdfs.qjournal.protocol.RequestInfo; import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.protocol.JournalProtocol; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.thirdparty.protobuf.RpcController; import org.apache.hadoop.thirdparty.protobuf.ServiceException; @@ -108,7 +108,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, NULL_CONTROLLER, req.build()); return resp.getIsFormatted(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -125,7 +126,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, } return rpcProxy.getJournalState(NULL_CONTROLLER, req.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -151,7 +153,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, rpcProxy.format(NULL_CONTROLLER, req.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -172,7 +175,8 @@ public class QJournalProtocolTranslatorPB implements 
ProtocolMetaInterface, return rpcProxy.newEpoch(NULL_CONTROLLER, req.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -190,7 +194,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, try { rpcProxy.journal(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -201,7 +206,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, .setReqInfo(convert(reqInfo)) .build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -230,7 +236,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, try { rpcProxy.startLogSegment(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -246,7 +253,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, try { rpcProxy.finalizeLogSegment(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -260,7 +268,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, try { rpcProxy.purgeLogs(NULL_CONTROLLER, req); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -281,7 +290,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, req.build() ); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -299,7 +309,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, } return 
rpcProxy.getJournaledEdits(NULL_CONTROLLER, req.build()); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -313,7 +324,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, .setSegmentTxId(segmentTxId) .build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -328,7 +340,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, .setFromURL(fromUrl.toExternalForm()) .build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -346,7 +359,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, .setJid(convertJournalId(jid)); rpcProxy.doPreUpgrade(NULL_CONTROLLER, req.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -359,7 +373,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, .setSInfo(PBHelper.convert(sInfo)) .build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -374,7 +389,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, } rpcProxy.doFinalize(NULL_CONTROLLER, req.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -397,7 +413,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, NULL_CONTROLLER, req.build()); return response.getCanRollBack(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -413,7 +430,8 @@ public class 
QJournalProtocolTranslatorPB implements ProtocolMetaInterface, } rpcProxy.doRollback(NULL_CONTROLLER, req.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -432,7 +450,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, } rpcProxy.discardSegments(NULL_CONTROLLER, req.build()); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } @@ -451,7 +470,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface, NULL_CONTROLLER, req.build()); return response.getResultCTime(); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + + throw ShadedProtobufHelper.getRemoteException(e); } } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/protocolPB/HSAdminRefreshProtocolClientSideTranslatorPB.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/protocolPB/HSAdminRefreshProtocolClientSideTranslatorPB.java index c4ccab8d882..c35f4acf897 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/protocolPB/HSAdminRefreshProtocolClientSideTranslatorPB.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/protocolPB/HSAdminRefreshProtocolClientSideTranslatorPB.java @@ -22,10 +22,10 @@ import java.io.Closeable; import java.io.IOException; import org.apache.hadoop.classification.InterfaceAudience.Private; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; +import 
org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol; import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocolPB; import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshAdminAclsRequestProto; @@ -77,7 +77,8 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements rpcProxy.refreshAdminAcls(NULL_CONTROLLER, VOID_REFRESH_ADMIN_ACLS_REQUEST); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -88,7 +89,8 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements rpcProxy.refreshLoadedJobCache(NULL_CONTROLLER, VOID_REFRESH_LOADED_JOB_CACHE_REQUEST); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -98,7 +100,8 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements rpcProxy.refreshJobRetentionSettings(NULL_CONTROLLER, VOID_REFRESH_JOB_RETENTION_SETTINGS_REQUEST); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } @@ -108,7 +111,8 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements rpcProxy.refreshLogRetentionSettings(NULL_CONTROLLER, VOID_REFRESH_LOG_RETENTION_SETTINGS_REQUEST); } catch (ServiceException se) { - throw ProtobufHelper.extractRemoteException(se); + + throw ShadedProtobufHelper.getRemoteException(se); } } diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml index df142747087..0a520f1910d 100644 --- a/hadoop-project/pom.xml +++ b/hadoop-project/pom.xml @@ -84,10 +84,13 @@ 1.1 - + + 2.5.0 + + compile + provided - 3.7.1 ${env.HADOOP_PROTOC_PATH} diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java index 2cd6f3a9491..c8fcae52b08 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java @@ -24,9 +24,9 @@ import java.net.InetSocketAddress; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufHelper; import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; +import org.apache.hadoop.ipc.internal.ShadedProtobufHelper; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto; @@ -223,7 +223,7 @@ public class ResourceManagerAdministrationProtocolPBClientImpl implements Resour return (String[]) responseProto.getGroupsList().toArray( new String[responseProto.getGroupsCount()]); } catch (ServiceException e) { - throw ProtobufHelper.extractRemoteException(e); + throw ShadedProtobufHelper.getRemoteException(e); } }