HADOOP-18487. Make protobuf 2.5 optional

* New package org.apache.hadoop.ipc.internal for internal-only classes,
  containing a ShadedProtobufHelper whose protobuf references are all to the
  shaded library, so it is guaranteed not to need protobuf-2.5 on the classpath.
* findbugs exclusions, protobuf source patch etc.
* New export policies can be declared for the protobuf JAR in hadoop-common
  and the other places it is referenced; hadoop-common stays back at @compile.

Change-Id: I61a99d2fd673259ab50d000f28a29e8a38aaf1b1
This commit is contained in:
parent 39f896ee54
commit 5f86dd58bd
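Every call-site change below follows one mechanical pattern: drop the org.apache.hadoop.ipc.ProtobufHelper import and rethrow through the new internal helper. A minimal sketch of that pattern, assuming a caller holding some generated protobuf service stub (the RpcProxy interface here is hypothetical, named only for illustration):

import java.io.IOException;

import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

public class MigrationSketch {

  /** Hypothetical stand-in for a generated protobuf service stub. */
  interface RpcProxy {
    void someRpcCall() throws ServiceException;
  }

  /** The post-patch shape of every translator call site in this diff. */
  static void invoke(RpcProxy rpcProxy) throws IOException {
    try {
      rpcProxy.someRpcCall();
    } catch (ServiceException e) {
      // before this patch: throw ProtobufHelper.extractRemoteException(e);
      throw ShadedProtobufHelper.getRemoteException(e);
    }
  }
}

Because only the shaded org.apache.hadoop.thirdparty.protobuf classes appear here, nothing in this path needs protobuf-java 2.5 at runtime.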
@@ -451,7 +451,7 @@
   </Match>

   <Match>
-    <Class name="org.apache.hadoop.ipc.ProtobufHelper" />
+    <Class name="org.apache.hadoop.ipc.ShadedProtobufHelper" />
     <Method name="getFixedByteString" />
     <Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
   </Match>
@@ -251,10 +251,11 @@
       <artifactId>re2j</artifactId>
       <scope>compile</scope>
     </dependency>
+    <!-- Needed for compilation, though no longer in production. -->
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <scope>${protobuf2.scope}</scope>
+      <scope>${common.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>com.google.code.gson</groupId>
@@ -484,11 +485,11 @@
             <!--These classes have direct Protobuf references for backward compatibility reasons-->
             <excludes>
-              <exclude>**/ProtobufHelper.java</exclude>
               <exclude>**/RpcWritable.java</exclude>
               <exclude>**/ProtobufRpcEngineCallback.java</exclude>
               <exclude>**/ProtobufRpcEngine.java</exclude>
               <exclude>**/ProtobufRpcEngine2.java</exclude>
               <exclude>**/ProtobufRpcEngineProtos.java</exclude>
+              <exclude>**/ProtobufWrapperLegacy.java</exclude>
             </excludes>
           </configuration>
         </execution>
@@ -37,10 +37,10 @@ import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.MonitorHealthRequestPr
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToObserverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.UserGroupInformation;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;

@@ -87,7 +87,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
     try {
       rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -100,7 +100,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements

       rpcProxy.transitionToActive(NULL_CONTROLLER, req);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -112,7 +112,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
           .setReqInfo(convert(reqInfo)).build();
       rpcProxy.transitionToStandby(NULL_CONTROLLER, req);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -125,7 +125,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
           .setReqInfo(convert(reqInfo)).build();
       rpcProxy.transitionToObserver(NULL_CONTROLLER, req);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -136,7 +136,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements
       status = rpcProxy.getServiceStatus(NULL_CONTROLLER,
           GET_SERVICE_STATUS_REQ);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }

     HAServiceStatus ret = new HAServiceStatus(
@@ -27,10 +27,10 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.ha.ZKFCProtocol;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;

@@ -63,7 +63,8 @@ public class ZKFCProtocolClientSideTranslatorPB implements
           .build();
       rpcProxy.cedeActive(NULL_CONTROLLER, req);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -73,7 +74,8 @@ public class ZKFCProtocolClientSideTranslatorPB implements
       rpcProxy.gracefulFailover(NULL_CONTROLLER,
           GracefulFailoverRequestProto.getDefaultInstance());
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }
@@ -18,11 +18,10 @@
 package org.apache.hadoop.ipc;

 import java.io.IOException;
-import java.util.concurrent.ConcurrentHashMap;

 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;

@@ -31,9 +30,15 @@ import org.apache.hadoop.thirdparty.protobuf.ByteString;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;

 /**
- * Helper methods for protobuf related RPC implementation
+ * Helper methods for protobuf related RPC implementation.
+ * This is deprecated because it references protobuf 2.5 classes
+ * as well as the shaded ones -and so may need an unshaded protobuf
+ * JAR on the classpath during compilation.
+ * It should not be used internally; it is retained in case other
+ * applications use it.
 */
 @InterfaceAudience.Private
+@Deprecated
 public final class ProtobufHelper {

   private ProtobufHelper() {

@@ -41,22 +46,14 @@ public final class ProtobufHelper {
   }

   /**
    * Return the IOException thrown by the remote server wrapped in
    * ServiceException as cause.
-   * The signature of this method changes with updates to the hadoop-thirdparty
-   * shaded protobuf library.
    * @param se ServiceException that wraps IO exception thrown by the server
    * @return Exception wrapped in ServiceException or
    *         a new IOException that wraps the unexpected ServiceException.
    */
-  @InterfaceAudience.Private
-  @InterfaceStability.Unstable
-  public static IOException extractRemoteException(ServiceException se) {
-    Throwable e = se.getCause();
-    if (e == null) {
-      return new IOException(se);
-    }
-    return e instanceof IOException ? (IOException) e : new IOException(se);
+  public static IOException getRemoteException(ServiceException se) {
+    return ShadedProtobufHelper.getRemoteException(se);
   }

   /**

@@ -79,29 +76,13 @@ public final class ProtobufHelper {
     return e instanceof IOException ? (IOException) e : new IOException(se);
   }

-  /**
-   * Map used to cache fixed strings to ByteStrings. Since there is no
-   * automatic expiration policy, only use this for strings from a fixed, small
-   * set.
-   * <p/>
-   * This map should not be accessed directly. Use the getFixedByteString
-   * methods instead.
-   */
-  private final static ConcurrentHashMap<Object, ByteString>
-      FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();
-
   /**
    * Get the ByteString for frequently used fixed and small set strings.
    * @param key string
    * @return the ByteString for frequently used fixed and small set strings.
    */
   public static ByteString getFixedByteString(Text key) {
-    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
-    if (value == null) {
-      value = ByteString.copyFromUtf8(key.toString());
-      FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
-    }
-    return value;
+    return ShadedProtobufHelper.getFixedByteString(key);
   }

   /**

@@ -110,34 +91,20 @@ public final class ProtobufHelper {
    * @return ByteString for frequently used fixed and small set strings.
    */
   public static ByteString getFixedByteString(String key) {
-    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
-    if (value == null) {
-      value = ByteString.copyFromUtf8(key);
-      FIXED_BYTESTRING_CACHE.put(key, value);
-    }
-    return value;
+    return ShadedProtobufHelper.getFixedByteString(key);
   }

   public static ByteString getByteString(byte[] bytes) {
     // return singleton to reduce object allocation
-    return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes);
+    return ShadedProtobufHelper.getByteString(bytes);
   }

   public static Token<? extends TokenIdentifier> tokenFromProto(
       TokenProto tokenProto) {
-    Token<? extends TokenIdentifier> token = new Token<>(
-        tokenProto.getIdentifier().toByteArray(),
-        tokenProto.getPassword().toByteArray(), new Text(tokenProto.getKind()),
-        new Text(tokenProto.getService()));
-    return token;
+    return ShadedProtobufHelper.tokenFromProto(tokenProto);
   }

   public static TokenProto protoFromToken(Token<?> tok) {
-    TokenProto.Builder builder = TokenProto.newBuilder().
-        setIdentifier(getByteString(tok.getIdentifier())).
-        setPassword(getByteString(tok.getPassword())).
-        setKindBytes(getFixedByteString(tok.getKind())).
-        setServiceBytes(getFixedByteString(tok.getService()));
-    return builder.build();
+    return ShadedProtobufHelper.protoFromToken(tok);
   }
 }
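After this hunk, ProtobufHelper is a pure facade: every method delegates to ShadedProtobufHelper, and the class is tagged @Deprecated. A hedged sketch of what external code still compiled against the old entry point sees (the key string is invented for illustration):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.thirdparty.protobuf.ByteString;

public class FacadeUseSketch {
  static ByteString cachedServiceName() {
    // Compiles and behaves as before for external users, but now just
    // forwards to ShadedProtobufHelper and raises a deprecation warning.
    return ProtobufHelper.getFixedByteString(new Text("my.service.name"));
  }
}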
@@ -37,7 +37,7 @@ import org.apache.hadoop.util.Preconditions;
  * which is a protobuf message unless that condition is met.
  */
 @InterfaceAudience.Private
-class ProtobufWrapperLegacy extends RpcWritable {
+public class ProtobufWrapperLegacy extends RpcWritable {

   private com.google.protobuf.Message message;

@@ -48,19 +48,20 @@ class ProtobufWrapperLegacy extends RpcWritable {
    * @param message message to wrap.
    * @throws IllegalArgumentException if the class is not a protobuf message.
    */
-  ProtobufWrapperLegacy(Object message) {
+  public ProtobufWrapperLegacy(Object message) {
     Preconditions.checkArgument(isUnshadedProtobufMessage(message),
         "message class is not an unshaded protobuf message %s",
         message.getClass());
     this.message = (com.google.protobuf.Message) message;
   }

-  com.google.protobuf.Message getMessage() {
+  public com.google.protobuf.Message getMessage() {
     return message;
   }

+
   @Override
-  void writeTo(ResponseBuffer out) throws IOException {
+  public void writeTo(ResponseBuffer out) throws IOException {
     int length = message.getSerializedSize();
     length += com.google.protobuf.CodedOutputStream.
         computeUInt32SizeNoTag(length);

@@ -70,7 +71,7 @@ class ProtobufWrapperLegacy extends RpcWritable {

   @SuppressWarnings("unchecked")
   @Override
-  <T> T readFrom(ByteBuffer bb) throws IOException {
+  protected <T> T readFrom(ByteBuffer bb) throws IOException {
     // using the parser with a byte[]-backed coded input stream is the
     // most efficient way to deserialize a protobuf. it has a direct
     // path to the PB ctor that doesn't create multi-layered streams

@@ -93,14 +94,15 @@ class ProtobufWrapperLegacy extends RpcWritable {
    * Has protobuf been looked for and is known as absent?
    * Saves a check on every message.
    */
-  private static final AtomicBoolean PROTOBUF_KNOWN_NOT_FOUND = new AtomicBoolean(false);
+  private static final AtomicBoolean PROTOBUF_KNOWN_NOT_FOUND =
+      new AtomicBoolean(false);

   /**
    * Is a message an unshaded protobuf message?
    * @param payload payload
    * @return true if protobuf.jar is on the classpath and the payload is a Message
    */
-  static boolean isUnshadedProtobufMessage(Object payload) {
+  public static boolean isUnshadedProtobufMessage(Object payload) {
     if (PROTOBUF_KNOWN_NOT_FOUND.get()) {
       // protobuf is known to be absent. fail fast without examining
       // jars or generating exceptions.

@@ -111,7 +113,8 @@ class ProtobufWrapperLegacy extends RpcWritable {
       // an unshaded protobuf message
       // this relies on classloader caching for performance
     try {
-      Class<?> protobufMessageClazz = Class.forName("com.google.protobuf.Message");
+      Class<?> protobufMessageClazz =
+          Class.forName("com.google.protobuf.Message");
       return protobufMessageClazz.isAssignableFrom(payload.getClass());
     } catch (ClassNotFoundException e) {
       PROTOBUF_KNOWN_NOT_FOUND.set(true);
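isUnshadedProtobufMessage() probes the classpath once via reflection and caches a negative result, so later messages skip the lookup entirely. The same pattern, reduced to a standalone sketch (class and method names here are ours, not from the diff):

import java.util.concurrent.atomic.AtomicBoolean;

/** Sketch: one-time reflective probe for an optional dependency. */
public final class OptionalClassProbe {

  /** Set once the class is known to be absent; saves repeated lookups. */
  private static final AtomicBoolean KNOWN_NOT_FOUND = new AtomicBoolean(false);

  private OptionalClassProbe() {
  }

  public static boolean isInstanceOf(String className, Object payload) {
    if (KNOWN_NOT_FOUND.get()) {
      // fail fast: the optional jar is not on the classpath
      return false;
    }
    try {
      // relies on classloader caching, so lookups are cheap after the first
      return Class.forName(className).isAssignableFrom(payload.getClass());
    } catch (ClassNotFoundException e) {
      KNOWN_NOT_FOUND.set(true);
      return false;
    }
  }
}

Called as OptionalClassProbe.isInstanceOf("com.google.protobuf.Message", payload), this behaves like the diff's probe: one failed lookup flips the flag and every subsequent call returns immediately.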
@@ -27,6 +27,7 @@ import java.util.TreeMap;
 import java.util.concurrent.ConcurrentHashMap;

 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureRequestProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto;

@@ -126,7 +127,8 @@ public class RpcClientUtil {
       resp = protocolInfoProxy.getProtocolSignature(NULL_CONTROLLER,
           builder.build());
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
     versionMap = convertProtocolSignatureProtos(resp
         .getProtocolSignatureList());
@@ -0,0 +1,144 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ipc.internal;
+
+import java.io.IOException;
+import java.util.concurrent.ConcurrentHashMap;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
+import org.apache.hadoop.security.token.Token;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.thirdparty.protobuf.ByteString;
+import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+/**
+ * Helper methods for protobuf related RPC implementation using the
+ * hadoop {@code org.apache.hadoop.thirdparty.protobuf} shaded version.
+ * This is <i>absolutely private</i>.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+public final class ShadedProtobufHelper {
+
+  private ShadedProtobufHelper() {
+    // Hidden constructor for class with only static helper methods
+  }
+
+  /**
+   * Return the IOException thrown by the remote server wrapped in
+   * ServiceException as cause.
+   * The signature of this method changes with updates to the hadoop-thirdparty
+   * shaded protobuf library.
+   * @param se ServiceException that wraps IO exception thrown by the server
+   * @return Exception wrapped in ServiceException or
+   *         a new IOException that wraps the unexpected ServiceException.
+   */
+  @InterfaceAudience.Private
+  @InterfaceStability.Unstable
+  public static IOException getRemoteException(ServiceException se) {
+    Throwable e = se.getCause();
+    if (e == null) {
+      return new IOException(se);
+    }
+    return e instanceof IOException ? (IOException) e : new IOException(se);
+  }
+
+  /**
+   * Map used to cache fixed strings to ByteStrings. Since there is no
+   * automatic expiration policy, only use this for strings from a fixed, small
+   * set.
+   * <p/>
+   * This map should not be accessed directly. Use the getFixedByteString
+   * methods instead.
+   */
+  private static final ConcurrentHashMap<Object, ByteString>
+      FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();
+
+  /**
+   * Get the ByteString for frequently used fixed and small set strings.
+   * @param key string
+   * @return the ByteString for frequently used fixed and small set strings.
+   */
+  public static ByteString getFixedByteString(Text key) {
+    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
+    if (value == null) {
+      value = ByteString.copyFromUtf8(key.toString());
+      FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
+    }
+    return value;
+  }
+
+  /**
+   * Get the ByteString for frequently used fixed and small set strings.
+   * @param key string
+   * @return ByteString for frequently used fixed and small set strings.
+   */
+  public static ByteString getFixedByteString(String key) {
+    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
+    if (value == null) {
+      value = ByteString.copyFromUtf8(key);
+      FIXED_BYTESTRING_CACHE.put(key, value);
+    }
+    return value;
+  }
+
+  /**
+   * Get the byte string of a non-null byte array.
+   * If the array is 0 bytes long, return a singleton to reduce object allocation.
+   * @param bytes bytes to convert.
+   * @return the protobuf byte string representation of the array.
+   */
+  public static ByteString getByteString(byte[] bytes) {
+    // return singleton to reduce object allocation
+    return (bytes.length == 0)
+        ? ByteString.EMPTY
+        : ByteString.copyFrom(bytes);
+  }
+
+  public static Token<? extends TokenIdentifier> tokenFromProto(
+      TokenProto tokenProto) {
+    Token<? extends TokenIdentifier> token = new Token<>(
+        tokenProto.getIdentifier().toByteArray(),
+        tokenProto.getPassword().toByteArray(),
+        new Text(tokenProto.getKind()),
+        new Text(tokenProto.getService()));
+    return token;
+  }
+
+  /**
+   * Create a {@code TokenProto} instance
+   * from a hadoop token.
+   * This builds and caches the fields
+   * (identifier, password, kind, service) but not
+   * renewer or any payload.
+   * @param tok token
+   * @return a marshallable protobuf class.
+   */
+  public static TokenProto protoFromToken(Token<?> tok) {
+    TokenProto.Builder builder = TokenProto.newBuilder().
+        setIdentifier(getByteString(tok.getIdentifier())).
+        setPassword(getByteString(tok.getPassword())).
+        setKindBytes(getFixedByteString(tok.getKind())).
+        setServiceBytes(getFixedByteString(tok.getService()));
+    return builder.build();
+  }
+}
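A short usage sketch of the new helper's exception handling, under the assumption that the caller already holds a shaded ServiceException (the message strings are illustrative; run with -ea to check the asserts):

import java.io.IOException;

import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

public class UnwrapSketch {
  public static void main(String[] args) {
    // Cause present and an IOException: the original exception is returned.
    IOException ioe = new IOException("remote failure");
    assert ShadedProtobufHelper.getRemoteException(
        new ServiceException(ioe)) == ioe;

    // No cause: the ServiceException is wrapped in a new IOException.
    IOException wrapped = ShadedProtobufHelper.getRemoteException(
        new ServiceException("empty"));
    assert wrapped.getCause() instanceof ServiceException;
  }
}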
@@ -0,0 +1,28 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * IPC internal classes not for any use by libraries outside
+ * the apache hadoop source tree.
+ */
+@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "YARN"})
+@InterfaceStability.Unstable
+package org.apache.hadoop.ipc.internal;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
@@ -25,12 +25,12 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.List;

-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RefreshResponse;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.ipc.GenericRefreshProtocol;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto;
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto;
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto;

@@ -68,7 +68,8 @@ public class GenericRefreshProtocolClientSideTranslatorPB implements
       GenericRefreshResponseCollectionProto resp = rpcProxy.refresh(NULL_CONTROLLER, request);
       return unpack(resp);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }
@@ -21,11 +21,11 @@ package org.apache.hadoop.ipc.protocolPB;
 import java.io.Closeable;
 import java.io.IOException;

-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.ipc.RefreshCallQueueProtocol;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.ipc.proto.RefreshCallQueueProtocolProtos.RefreshCallQueueRequestProto;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;

@@ -58,7 +58,8 @@ public class RefreshCallQueueProtocolClientSideTranslatorPB implements
       rpcProxy.refreshCallQueue(NULL_CONTROLLER,
           VOID_REFRESH_CALL_QUEUE_REQUEST);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }
@@ -18,6 +18,7 @@

 package org.apache.hadoop.security;

+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.thirdparty.protobuf.ByteString;

 import java.io.BufferedInputStream;

@@ -46,7 +47,6 @@ import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto;

@@ -382,7 +382,7 @@ public class Credentials implements Writable {
       CredentialsKVProto.Builder kv = CredentialsKVProto.newBuilder().
           setAliasBytes(ByteString.copyFrom(
               e.getKey().getBytes(), 0, e.getKey().getLength())).
-          setToken(ProtobufHelper.protoFromToken(e.getValue()));
+          setToken(ShadedProtobufHelper.protoFromToken(e.getValue()));
       storage.addTokens(kv.build());
     }

@@ -404,7 +404,7 @@ public class Credentials implements Writable {
     CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream)in);
     for (CredentialsKVProto kv : storage.getTokensList()) {
       addToken(new Text(kv.getAliasBytes().toByteArray()),
-          ProtobufHelper.tokenFromProto(kv.getToken()));
+          ShadedProtobufHelper.tokenFromProto(kv.getToken()));
     }
     for (CredentialsKVProto kv : storage.getSecretsList()) {
       addSecretKey(new Text(kv.getAliasBytes().toByteArray()),
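The Credentials change is representative of how tokens now round-trip through their protobuf form. A hedged, self-contained sketch (all field values are invented for illustration; real code obtains them from a secret manager):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenRoundTripSketch {
  public static void main(String[] args) {
    // Build a token from raw fields: identifier, password, kind, service.
    Token<? extends TokenIdentifier> token = new Token<>(
        new byte[]{1, 2, 3},
        new byte[]{4, 5, 6},
        new Text("HDFS_DELEGATION_TOKEN"),
        new Text("nn1:8020"));

    // Serialize with the shaded helper, then rebuild the token.
    TokenProto proto = ShadedProtobufHelper.protoFromToken(token);
    Token<? extends TokenIdentifier> copy =
        ShadedProtobufHelper.tokenFromProto(proto);
    System.out.println(copy.getKind() + " @ " + copy.getService());
  }
}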
@@ -21,10 +21,10 @@ package org.apache.hadoop.security.protocolPB;
 import java.io.Closeable;
 import java.io.IOException;

-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.security.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto;

@@ -58,7 +58,8 @@ public class RefreshAuthorizationPolicyProtocolClientSideTranslatorPB implements
       rpcProxy.refreshServiceAcl(NULL_CONTROLLER,
           VOID_REFRESH_SERVICE_ACL_REQUEST);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }
@@ -21,10 +21,10 @@ package org.apache.hadoop.security.protocolPB;
 import java.io.Closeable;
 import java.io.IOException;

-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto;
 import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto;

@@ -63,7 +63,8 @@ public class RefreshUserMappingsProtocolClientSideTranslatorPB implements
       rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER,
           VOID_REFRESH_USER_TO_GROUPS_MAPPING_REQUEST);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }

@@ -73,7 +74,8 @@ public class RefreshUserMappingsProtocolClientSideTranslatorPB implements
       rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER,
           VOID_REFRESH_SUPERUSER_GROUPS_CONFIGURATION_REQUEST);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }
@@ -20,10 +20,11 @@ package org.apache.hadoop.tools.protocolPB;

 import java.io.Closeable;
 import java.io.IOException;
-import org.apache.hadoop.ipc.ProtobufHelper;
+
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.tools.GetUserMappingsProtocol;
 import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserRequestProto;
 import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;

@@ -56,7 +57,8 @@ public class GetUserMappingsProtocolClientSideTranslatorPB implements
     try {
       resp = rpcProxy.getGroupsForUser(NULL_CONTROLLER, request);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
     return resp.getGroupsList().toArray(new String[resp.getGroupsCount()]);
   }
@@ -22,28 +22,30 @@ import java.io.IOException;

 import org.junit.Test;

+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.test.AbstractHadoopTestBase;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;

-import static org.apache.hadoop.ipc.ProtobufHelper.extractRemoteException;
 import static org.apache.hadoop.test.LambdaTestUtils.verifyCause;

 /**
- * Test methods in {@link ProtobufHelper}.
+ * Test methods in {@link ShadedProtobufHelper}.
 */
-public class TestProtobufHelper extends AbstractHadoopTestBase {
+public class TestShadedProtobufHelper extends AbstractHadoopTestBase {

   @Test
   public void testExtractRemoteExceptionNoCause() throws Throwable {
     ServiceException source = new ServiceException("empty");
-    IOException ex = extractRemoteException(source);
+
+    IOException ex = ShadedProtobufHelper.getRemoteException(source);
     verifyCause(ServiceException.class, ex);
   }

   @Test
   public void testExtractRemoteExceptionIOECause() throws Throwable {
     IOException source = new IOException("ioe");
-    IOException ex = extractRemoteException(
+
+    IOException ex = ShadedProtobufHelper.getRemoteException(
         new ServiceException(source));
     // if not the same, throw
     if (!(ex == source)) {

@@ -54,7 +56,8 @@ public class TestProtobufHelper extends AbstractHadoopTestBase {
   @Test
   public void testExtractRemoteExceptionOtherCause() throws Throwable {
     NullPointerException source = new NullPointerException("npe");
-    IOException ex = extractRemoteException(
+
+    IOException ex = ShadedProtobufHelper.getRemoteException(
        new ServiceException(source));
     // if not the same, throw
     ServiceException c1 = verifyCause(ServiceException.class, ex);
@@ -67,12 +67,12 @@ import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.DiskBa
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus;
 import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;

@@ -200,7 +200,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
     try {
       return rpcProxy.getReplicaVisibleLength(NULL_CONTROLLER, req).getLength();
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -209,7 +209,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
     try {
       rpcProxy.refreshNamenodes(NULL_CONTROLLER, VOID_REFRESH_NAMENODES);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -220,7 +220,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
     try {
       rpcProxy.deleteBlockPool(NULL_CONTROLLER, req);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -235,7 +235,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
     try {
       resp = rpcProxy.getBlockLocalPathInfo(NULL_CONTROLLER, req);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
     return new BlockLocalPathInfo(PBHelperClient.convert(resp.getBlock()),
         resp.getLocalPath(), resp.getLocalMetaPath());

@@ -260,7 +260,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
     try {
       rpcProxy.shutdownDatanode(NULL_CONTROLLER, request);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -269,7 +269,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
     try {
       rpcProxy.evictWriters(NULL_CONTROLLER, VOID_EVICT_WRITERS);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -281,7 +281,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
           VOID_GET_DATANODE_INFO);
       return PBHelperClient.convert(response.getLocalInfo());
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -290,7 +290,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
     try {
       rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -304,7 +304,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
           NULL_CONTROLLER,
           VOID_GET_RECONFIG_STATUS));
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -316,7 +316,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
           VOID_LIST_RECONFIGURABLE_PROPERTIES);
       return response.getNameList();
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -331,7 +331,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
       }
       rpcProxy.triggerBlockReport(NULL_CONTROLLER, builder.build());
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -343,7 +343,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
           VOID_GET_BALANCER_BANDWIDTH);
       return response.getBandwidth();
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -374,7 +374,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
           .build();
       rpcProxy.submitDiskBalancerPlan(NULL_CONTROLLER, request);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -392,7 +392,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
           .setPlanID(planID).build();
       rpcProxy.cancelDiskBalancerPlan(NULL_CONTROLLER, request);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -417,7 +417,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
           response.hasPlanFile() ? response.getPlanFile() : null,
           response.hasCurrentStatus() ? response.getCurrentStatus() : null);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -430,7 +430,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
           rpcProxy.getDiskBalancerSetting(NULL_CONTROLLER, request);
       return response.hasValue() ? response.getValue() : null;
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -450,7 +450,7 @@ public class ClientDatanodeProtocolTranslatorPB implements
       }
       return volumeInfoList;
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }
 }
File diff suppressed because it is too large
@@ -209,7 +209,7 @@ import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.SlotId;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.erasurecode.ECSchema;
-import org.apache.hadoop.ipc.ProtobufHelper;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.util.ChunkedArrayList;

@@ -237,7 +237,7 @@ public class PBHelperClient {
       FsAction.values();

   private static ByteString getFixedByteString(String key) {
-    return ProtobufHelper.getFixedByteString(key);
+    return ShadedProtobufHelper.getFixedByteString(key);
   }

   /**

@@ -260,7 +260,8 @@ public class PBHelperClient {

   public static ByteString getByteString(byte[] bytes) {
-    // return singleton to reduce object allocation
-    return ProtobufHelper.getByteString(bytes);
+    // return singleton to reduce object allocation
+    return ShadedProtobufHelper.getByteString(bytes);
   }

   public static ShmId convert(ShortCircuitShmIdProto shmId) {

@@ -328,7 +329,7 @@ public class PBHelperClient {
   }

   public static TokenProto convert(Token<?> tok) {
-    return ProtobufHelper.protoFromToken(tok);
+    return ShadedProtobufHelper.protoFromToken(tok);
   }

   public static ShortCircuitShmIdProto convert(ShmId shmId) {

@@ -814,8 +815,8 @@ public class PBHelperClient {

   public static Token<BlockTokenIdentifier> convert(
       TokenProto blockToken) {
-    return (Token<BlockTokenIdentifier>) ProtobufHelper
-        .tokenFromProto(blockToken);
+    return (Token<BlockTokenIdentifier>) ShadedProtobufHelper.tokenFromProto(
+        blockToken);
   }

   // DatanodeId
@@ -33,12 +33,12 @@ import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.GetRe
 import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.StartReconfigurationRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -105,7 +105,8 @@ public class ReconfigurationProtocolTranslatorPB implements
     try {
       rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -119,7 +120,8 @@ public class ReconfigurationProtocolTranslatorPB implements
           NULL_CONTROLLER,
           VOID_GET_RECONFIG_STATUS));
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -131,7 +133,8 @@ public class ReconfigurationProtocolTranslatorPB implements
           VOID_LIST_RECONFIGURABLE_PROPERTIES);
       return response.getNameList();
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }
@@ -97,12 +97,11 @@ import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.RemoveMou
 import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.RemoveMountTableEntryResponsePBImpl;
 import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.UpdateMountTableEntryRequestPBImpl;
 import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.UpdateMountTableEntryResponsePBImpl;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
-
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;

 /**

@@ -150,7 +149,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.addMountTableEntry(null, proto);
       return new AddMountTableEntryResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -165,7 +165,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.updateMountTableEntry(null, proto);
       return new UpdateMountTableEntryResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -180,7 +181,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.removeMountTableEntry(null, proto);
       return new RemoveMountTableEntryResponsePBImpl(responseProto);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -195,7 +197,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.getMountTableEntries(null, proto);
       return new GetMountTableEntriesResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -209,7 +212,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.enterSafeMode(null, proto);
       return new EnterSafeModeResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -223,7 +227,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.leaveSafeMode(null, proto);
       return new LeaveSafeModeResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -237,7 +242,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.getSafeMode(null, proto);
       return new GetSafeModeResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -252,7 +258,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.disableNameservice(null, proto);
      return new DisableNameserviceResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -267,7 +274,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.enableNameservice(null, proto);
       return new EnableNameserviceResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -281,7 +289,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.getDisabledNameservices(null, proto);
       return new GetDisabledNameservicesResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -296,7 +305,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.refreshMountTableEntries(null, proto);
       return new RefreshMountTableEntriesResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -311,7 +321,8 @@ public class RouterAdminProtocolTranslatorPB
       rpcProxy.getDestination(null, proto);
       return new GetDestinationResponsePBImpl(response);
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }

@@ -325,7 +336,8 @@ public class RouterAdminProtocolTranslatorPB
       return new RefreshSuperUserGroupsConfigurationResponsePBImpl(response)
           .getStatus();
     } catch (ServiceException e) {
-      throw new IOException(ProtobufHelper.extractRemoteException(e).getMessage());
+
+      throw new IOException(ShadedProtobufHelper.getRemoteException(e).getMessage());
     }
   }
 }
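Note the two wrapping styles visible so far: most translator classes rethrow the extracted IOException directly, preserving its type and stack, while RouterAdminProtocolTranslatorPB keeps only the message text, so the original cause is dropped. Side by side, as a sketch:

import java.io.IOException;

import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

public class WrapStyles {
  // Translator style: rethrow the server's IOException, keeping type and cause.
  static IOException keepCause(ServiceException e) {
    return ShadedProtobufHelper.getRemoteException(e);
  }

  // RouterAdmin style: keep only the message text; the original cause is lost.
  static IOException messageOnly(ServiceException e) {
    return new IOException(
        ShadedProtobufHelper.getRemoteException(e).getMessage());
  }
}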
@@ -29,11 +29,11 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeLifelineProtocol;
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;

@@ -99,7 +99,8 @@ public class DatanodeLifelineProtocolClientSideTranslatorPB implements
     try {
       rpcProxy.sendLifeline(NULL_CONTROLLER, builder.build());
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }
@@ -61,11 +61,11 @@ import org.apache.hadoop.hdfs.server.protocol.StorageBlockReport;
 import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
 import org.apache.hadoop.hdfs.server.protocol.StorageReport;
 import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;

@@ -126,7 +126,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     try {
       resp = rpcProxy.registerDatanode(NULL_CONTROLLER, builder.build());
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
     return PBHelper.convert(resp.getRegistration());
   }

@@ -167,7 +168,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     try {
       resp = rpcProxy.sendHeartbeat(NULL_CONTROLLER, builder.build());
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
     DatanodeCommand[] cmds = new DatanodeCommand[resp.getCmdsList().size()];
     int index = 0;

@@ -218,7 +220,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     try {
       resp = rpcProxy.blockReport(NULL_CONTROLLER, builder.build());
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
     return resp.hasCmd() ? PBHelper.convert(resp.getCmd()) : null;
   }

@@ -238,7 +241,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     try {
       resp = rpcProxy.cacheReport(NULL_CONTROLLER, builder.build());
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
     if (resp.hasCmd()) {
       return PBHelper.convert(resp.getCmd());

@@ -267,7 +271,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     try {
       rpcProxy.blockReceivedAndDeleted(NULL_CONTROLLER, builder.build());
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }

@@ -280,7 +285,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     try {
       rpcProxy.errorReport(NULL_CONTROLLER, req);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }

@@ -290,7 +296,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
       return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER,
          VOID_VERSION_REQUEST).getInfo());
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -305,7 +312,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     try {
       rpcProxy.reportBadBlocks(NULL_CONTROLLER, req);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }

@@ -330,7 +338,8 @@ public class DatanodeProtocolClientSideTranslatorPB implements
     try {
       rpcProxy.commitBlockSynchronization(NULL_CONTROLLER, req);
     } catch (ServiceException se) {
-      throw ProtobufHelper.extractRemoteException(se);
+
+      throw ShadedProtobufHelper.getRemoteException(se);
     }
   }
@@ -16,6 +16,7 @@
  */
 package org.apache.hadoop.hdfs.protocolPB;

+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;

@@ -30,7 +31,6 @@ import org.apache.hadoop.hdfs.server.aliasmap.InMemoryAliasMapProtocol;
 import org.apache.hadoop.hdfs.server.common.FileRegion;
 import org.apache.hadoop.hdfs.server.namenode.ha.AbstractNNFailoverProxyProvider;
 import org.apache.hadoop.hdfs.server.namenode.ha.InMemoryAliasMapFailoverProxyProvider;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.net.NetUtils;
 import org.slf4j.Logger;

@@ -158,7 +158,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
       }

     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -186,7 +187,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
       return Optional.empty();

     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -209,7 +211,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
     try {
       rpcProxy.write(null, request);
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }

@@ -220,7 +223,8 @@ public class InMemoryAliasMapProtocolClientSideTranslatorPB
           BlockPoolRequestProto.newBuilder().build());
       return response.getBlockPoolId();
     } catch (ServiceException e) {
-      throw ProtobufHelper.extractRemoteException(e);
+
+      throw ShadedProtobufHelper.getRemoteException(e);
     }
   }
@ -34,11 +34,11 @@ import org.apache.hadoop.hdfs.protocol.proto.InterDatanodeProtocolProtos.UpdateR
import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.security.UserGroupInformation;

import org.apache.hadoop.thirdparty.protobuf.RpcController;

@ -82,7 +82,8 @@ public class InterDatanodeProtocolTranslatorPB implements
try {
resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
if (!resp.getReplicaFound()) {
// No replica found on the remote node.

@ -112,7 +113,8 @@ public class InterDatanodeProtocolTranslatorPB implements
return rpcProxy.updateReplicaUnderRecovery(NULL_CONTROLLER, req
).getStorageUuid();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}
@ -29,11 +29,11 @@ import org.apache.hadoop.hdfs.protocol.proto.JournalProtocolProtos.StartLogSegme
import org.apache.hadoop.hdfs.server.protocol.FenceResponse;
import org.apache.hadoop.hdfs.server.protocol.JournalInfo;
import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;

import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

@ -72,7 +72,8 @@ public class JournalProtocolTranslatorPB implements ProtocolMetaInterface,
try {
rpcProxy.journal(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -87,7 +88,8 @@ public class JournalProtocolTranslatorPB implements ProtocolMetaInterface,
try {
rpcProxy.startLogSegment(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -101,7 +103,8 @@ public class JournalProtocolTranslatorPB implements ProtocolMetaInterface,
return new FenceResponse(resp.getPreviousEpoch(),
resp.getLastTransactionId(), resp.getInSync());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}
@ -51,12 +51,12 @@ import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.ProtocolTranslator;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;

import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

@ -111,7 +111,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
return PBHelper.convert(rpcProxy.getBlocks(NULL_CONTROLLER, req)
.getBlocks());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -122,7 +123,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
VOID_GET_BLOCKKEYS_REQUEST);
return rsp.hasKeys() ? PBHelper.convert(rsp.getKeys()) : null;
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -132,7 +134,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
return rpcProxy.getTransactionId(NULL_CONTROLLER,
VOID_GET_TRANSACTIONID_REQUEST).getTxId();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -142,7 +145,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
return rpcProxy.getMostRecentCheckpointTxId(NULL_CONTROLLER,
GetMostRecentCheckpointTxIdRequestProto.getDefaultInstance()).getTxId();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -152,7 +156,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
return PBHelper.convert(rpcProxy.rollEditLog(NULL_CONTROLLER,
VOID_ROLL_EDITLOG_REQUEST).getSignature());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -162,7 +167,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER,
VOID_VERSION_REQUEST).getInfo());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -175,7 +181,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
try {
rpcProxy.errorReport(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -189,7 +196,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
rpcProxy.registerSubordinateNamenode(NULL_CONTROLLER, req)
.getRegistration());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -202,7 +210,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
try {
cmd = rpcProxy.startCheckpoint(NULL_CONTROLLER, req).getCommand();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
return PBHelper.convert(cmd);
}

@ -216,7 +225,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
try {
rpcProxy.endCheckpoint(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -229,7 +239,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
return PBHelper.convert(rpcProxy.getEditLogManifest(NULL_CONTROLLER, req)
.getManifest());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -249,7 +260,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
NULL_CONTROLLER, req);
return response.getIsUpgradeFinalized();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -262,7 +274,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
NULL_CONTROLLER, req);
return response.getIsRollingUpgrade();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -275,7 +288,8 @@ public class NamenodeProtocolTranslatorPB implements NamenodeProtocol,
rpcProxy.getNextSPSPath(NULL_CONTROLLER, req);
return nextSPSPath.hasSpsPath() ? nextSPSPath.getSpsPath() : null;
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}
}
@ -19,6 +19,7 @@

package org.apache.hadoop.hdfs.qjournal.protocolPB;

import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
import org.apache.hadoop.classification.InterfaceAudience;

@ -27,7 +28,6 @@ import org.apache.hadoop.hdfs.qjournal.protocol.InterQJournalProtocol;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestRequestProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestResponseProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;

@ -76,7 +76,8 @@ public class InterQJournalProtocolTranslatorPB implements ProtocolMetaInterface,
req.build()
);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}
@ -63,11 +63,11 @@ import org.apache.hadoop.hdfs.qjournal.protocol.RequestInfo;
import org.apache.hadoop.hdfs.server.common.StorageInfo;
import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;

import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

@ -108,7 +108,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
NULL_CONTROLLER, req.build());
return resp.getIsFormatted();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -125,7 +126,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
}
return rpcProxy.getJournalState(NULL_CONTROLLER, req.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -151,7 +153,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,

rpcProxy.format(NULL_CONTROLLER, req.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -172,7 +175,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,

return rpcProxy.newEpoch(NULL_CONTROLLER, req.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -190,7 +194,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
try {
rpcProxy.journal(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -201,7 +206,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
.setReqInfo(convert(reqInfo))
.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -230,7 +236,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
try {
rpcProxy.startLogSegment(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -246,7 +253,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
try {
rpcProxy.finalizeLogSegment(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -260,7 +268,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
try {
rpcProxy.purgeLogs(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -281,7 +290,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
req.build()
);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -299,7 +309,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
}
return rpcProxy.getJournaledEdits(NULL_CONTROLLER, req.build());
} catch (ServiceException se) {
throw ProtobufHelper.extractRemoteException(se);
throw ShadedProtobufHelper.getRemoteException(se);
}
}

@ -313,7 +324,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
.setSegmentTxId(segmentTxId)
.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -328,7 +340,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
.setFromURL(fromUrl.toExternalForm())
.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -346,7 +359,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
.setJid(convertJournalId(jid));
rpcProxy.doPreUpgrade(NULL_CONTROLLER, req.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -359,7 +373,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
.setSInfo(PBHelper.convert(sInfo))
.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -374,7 +389,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
}
rpcProxy.doFinalize(NULL_CONTROLLER, req.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -397,7 +413,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
NULL_CONTROLLER, req.build());
return response.getCanRollBack();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -413,7 +430,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
}
rpcProxy.doRollback(NULL_CONTROLLER, req.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -432,7 +450,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
}
rpcProxy.discardSegments(NULL_CONTROLLER, req.build());
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}

@ -451,7 +470,8 @@ public class QJournalProtocolTranslatorPB implements ProtocolMetaInterface,
NULL_CONTROLLER, req.build());
return response.getResultCTime();
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}
@ -22,10 +22,10 @@ import java.io.Closeable;
import java.io.IOException;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol;
import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocolPB;
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshAdminAclsRequestProto;

@ -77,7 +77,8 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements
rpcProxy.refreshAdminAcls(NULL_CONTROLLER,
VOID_REFRESH_ADMIN_ACLS_REQUEST);
} catch (ServiceException se) {
throw ProtobufHelper.extractRemoteException(se);
throw ShadedProtobufHelper.getRemoteException(se);
}
}

@ -88,7 +89,8 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements
rpcProxy.refreshLoadedJobCache(NULL_CONTROLLER,
VOID_REFRESH_LOADED_JOB_CACHE_REQUEST);
} catch (ServiceException se) {
throw ProtobufHelper.extractRemoteException(se);
throw ShadedProtobufHelper.getRemoteException(se);
}
}

@ -98,7 +100,8 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements
rpcProxy.refreshJobRetentionSettings(NULL_CONTROLLER,
VOID_REFRESH_JOB_RETENTION_SETTINGS_REQUEST);
} catch (ServiceException se) {
throw ProtobufHelper.extractRemoteException(se);
throw ShadedProtobufHelper.getRemoteException(se);
}
}

@ -108,7 +111,8 @@ public class HSAdminRefreshProtocolClientSideTranslatorPB implements
rpcProxy.refreshLogRetentionSettings(NULL_CONTROLLER,
VOID_REFRESH_LOG_RETENTION_SETTINGS_REQUEST);
} catch (ServiceException se) {
throw ProtobufHelper.extractRemoteException(se);
throw ShadedProtobufHelper.getRemoteException(se);
}
}
@ -84,10 +84,13 @@
<!-- com.google.re2j version -->
<re2j.version>1.1</re2j.version>

<!--Protobuf version for backward compatibility-->
<!-- Protobuf version for backward compatibility -->
<!-- This is used in hadoop-common for compilation only -->
<protobuf.version>2.5.0</protobuf.version>
<!-- Protobuf scope in hadoop common -->
<common.protobuf2.scope>compile</common.protobuf2.scope>
<!-- Protobuf scope in other modules which explicitly import the library -->
<protobuf2.scope>provided</protobuf2.scope>

<!-- ProtocolBuffer version, actually used in Hadoop -->
<hadoop.protobuf.version>3.7.1</hadoop.protobuf.version>
<protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path>
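The property split above is what makes protobuf-2.5 optional at the build level: common.protobuf2.scope (default compile) governs the protobuf-java dependency inside hadoop-common itself, while protobuf2.scope (default provided) is for the other modules that still declare the library explicitly. Since these are ordinary Maven properties, a build that wants the unshaded 2.5 jar off the runtime classpath and out of transitive dependencies entirely could presumably override the hadoop-common default on the command line, for example with mvn install -Dcommon.protobuf2.scope=provided; that is standard Maven property handling, not something this diff itself exercises.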
@ -24,9 +24,9 @@ import java.net.InetSocketAddress;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;

@ -223,7 +223,7 @@ public class ResourceManagerAdministrationProtocolPBClientImpl implements Resour
return (String[]) responseProto.getGroupsList().toArray(
new String[responseProto.getGroupsCount()]);
} catch (ServiceException e) {
throw ProtobufHelper.extractRemoteException(e);
throw ShadedProtobufHelper.getRemoteException(e);
}
}