diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml index cf5c3874d10..23e39d055ff 100644 --- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml +++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml @@ -283,6 +283,10 @@ + + + + diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml index 0a08c01e30f..c22222d8a3f 100644 --- a/hadoop-common-project/hadoop-common/pom.xml +++ b/hadoop-common-project/hadoop-common/pom.xml @@ -395,7 +395,12 @@ src-compile-protoc - false + + false + + ProtobufRpcEngine.proto + + src-test-compile-protoc @@ -411,6 +416,9 @@ replace-generated-sources false + + **/ProtobufRpcEngineProtos.java + @@ -423,6 +431,14 @@ replace-sources false + + + **/ProtobufHelper.java + **/RpcWritable.java + **/ProtobufRpcEngineCallback.java + **/ProtobufRpcEngine.java + **/ProtobufRpcEngineProtos.java + @@ -1015,7 +1031,79 @@ - + + + aarch64 + + false + + aarch64 + + + + + + org.codehaus.mojo + build-helper-maven-plugin + + + add-source-legacy-protobuf + generate-sources + + add-source + + + + ${basedir}/src/main/arm-java + + + + + + + + + + + x86_64 + + false + + !aarch64 + + + + + + org.xolstice.maven.plugins + protobuf-maven-plugin + + + src-compile-protoc-legacy + generate-sources + + compile + + + false + + + com.google.protobuf:protoc:${protobuf.version}:exe:${os.detected.classifier} + + false + ${basedir}/src/main/proto + ${project.build.directory}/generated-sources/java + false + + ProtobufRpcEngine.proto + + + + + + + + diff --git a/hadoop-common-project/hadoop-common/src/main/arm-java/org/apache/hadoop/ipc/protobuf/ProtobufRpcEngineProtos.java b/hadoop-common-project/hadoop-common/src/main/arm-java/org/apache/hadoop/ipc/protobuf/ProtobufRpcEngineProtos.java new file mode 100644 index 00000000000..28e28bf6337 --- /dev/null +++ 
b/hadoop-common-project/hadoop-common/src/main/arm-java/org/apache/hadoop/ipc/protobuf/ProtobufRpcEngineProtos.java @@ -0,0 +1,1163 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ +// This is class is added to source because for arm protoc 2.5.0 executable +// is not available to generate the same code. +// Generated by the protocol buffer compiler. DO NOT EDIT! +// source: ProtobufRpcEngine.proto +package org.apache.hadoop.ipc.protobuf; + +public final class ProtobufRpcEngineProtos { + private ProtobufRpcEngineProtos() {} + public static void registerAllExtensions( + com.google.protobuf.ExtensionRegistry registry) { + } + public interface RequestHeaderProtoOrBuilder + extends com.google.protobuf.MessageOrBuilder { + + // required string methodName = 1; + /** + * required string methodName = 1; + * + *
+     ** Name of the RPC method
+     * 
+ */ + boolean hasMethodName(); + /** + * required string methodName = 1; + * + *
+     ** Name of the RPC method
+     * 
+ */ + java.lang.String getMethodName(); + /** + * required string methodName = 1; + * + *
+     ** Name of the RPC method
+     * 
+ */ + com.google.protobuf.ByteString + getMethodNameBytes(); + + // required string declaringClassProtocolName = 2; + /** + * required string declaringClassProtocolName = 2; + * + *
+     **
+     * RPCs for a particular interface (ie protocol) are done using a
+     * IPC connection that is setup using rpcProxy.
+     * The rpcProxy's has a declared protocol name that is
+     * sent form client to server at connection time.
+     *
+     * Each Rpc call also sends a protocol name
+     * (called declaringClassprotocolName). This name is usually the same
+     * as the connection protocol name except in some cases.
+     * For example metaProtocols such ProtocolInfoProto which get metainfo
+     * about the protocol reuse the connection but need to indicate that
+     * the actual protocol is different (i.e. the protocol is
+     * ProtocolInfoProto) since they reuse the connection; in this case
+     * the declaringClassProtocolName field is set to the ProtocolInfoProto
+     * 
+ */ + boolean hasDeclaringClassProtocolName(); + /** + * required string declaringClassProtocolName = 2; + * + *
+     **
+     * RPCs for a particular interface (ie protocol) are done using a
+     * IPC connection that is setup using rpcProxy.
+     * The rpcProxy's has a declared protocol name that is
+     * sent form client to server at connection time.
+     *
+     * Each Rpc call also sends a protocol name
+     * (called declaringClassprotocolName). This name is usually the same
+     * as the connection protocol name except in some cases.
+     * For example metaProtocols such ProtocolInfoProto which get metainfo
+     * about the protocol reuse the connection but need to indicate that
+     * the actual protocol is different (i.e. the protocol is
+     * ProtocolInfoProto) since they reuse the connection; in this case
+     * the declaringClassProtocolName field is set to the ProtocolInfoProto
+     * 
+ */ + java.lang.String getDeclaringClassProtocolName(); + /** + * required string declaringClassProtocolName = 2; + * + *
+     **
+     * RPCs for a particular interface (ie protocol) are done using a
+     * IPC connection that is setup using rpcProxy.
+     * The rpcProxy's has a declared protocol name that is
+     * sent form client to server at connection time.
+     *
+     * Each Rpc call also sends a protocol name
+     * (called declaringClassprotocolName). This name is usually the same
+     * as the connection protocol name except in some cases.
+     * For example metaProtocols such ProtocolInfoProto which get metainfo
+     * about the protocol reuse the connection but need to indicate that
+     * the actual protocol is different (i.e. the protocol is
+     * ProtocolInfoProto) since they reuse the connection; in this case
+     * the declaringClassProtocolName field is set to the ProtocolInfoProto
+     * 
+ */ + com.google.protobuf.ByteString + getDeclaringClassProtocolNameBytes(); + + // required uint64 clientProtocolVersion = 3; + /** + * required uint64 clientProtocolVersion = 3; + * + *
+     ** protocol version of class declaring the called method
+     * 
+ */ + boolean hasClientProtocolVersion(); + /** + * required uint64 clientProtocolVersion = 3; + * + *
+     ** protocol version of class declaring the called method
+     * 
+ */ + long getClientProtocolVersion(); + } + /** + * Protobuf type {@code hadoop.common.RequestHeaderProto} + * + *
+   **
+   * This message is the header for the Protobuf Rpc Engine
+   * when sending a RPC request from  RPC client to the RPC server.
+   * The actual request (serialized as protobuf) follows this request.
+   *
+   * No special header is needed for the Rpc Response for Protobuf Rpc Engine.
+   * The normal RPC response header (see RpcHeader.proto) are sufficient.
+   * 
+ */ + public static final class RequestHeaderProto extends + com.google.protobuf.GeneratedMessage + implements RequestHeaderProtoOrBuilder { + // Use RequestHeaderProto.newBuilder() to construct. + private RequestHeaderProto(com.google.protobuf.GeneratedMessage.Builder builder) { + super(builder); + this.unknownFields = builder.getUnknownFields(); + } + private RequestHeaderProto(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); } + + private static final RequestHeaderProto defaultInstance; + public static RequestHeaderProto getDefaultInstance() { + return defaultInstance; + } + + public RequestHeaderProto getDefaultInstanceForType() { + return defaultInstance; + } + + private final com.google.protobuf.UnknownFieldSet unknownFields; + @java.lang.Override + public final com.google.protobuf.UnknownFieldSet + getUnknownFields() { + return this.unknownFields; + } + private RequestHeaderProto( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + initFields(); + int mutable_bitField0_ = 0; + com.google.protobuf.UnknownFieldSet.Builder unknownFields = + com.google.protobuf.UnknownFieldSet.newBuilder(); + try { + boolean done = false; + while (!done) { + int tag = input.readTag(); + switch (tag) { + case 0: + done = true; + break; + default: { + if (!parseUnknownField(input, unknownFields, + extensionRegistry, tag)) { + done = true; + } + break; + } + case 10: { + bitField0_ |= 0x00000001; + methodName_ = input.readBytes(); + break; + } + case 18: { + bitField0_ |= 0x00000002; + declaringClassProtocolName_ = input.readBytes(); + break; + } + case 24: { + bitField0_ |= 0x00000004; + clientProtocolVersion_ = input.readUInt64(); + break; + } + } + } + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + throw e.setUnfinishedMessage(this); + } catch (java.io.IOException e) { + throw new 
com.google.protobuf.InvalidProtocolBufferException( + e.getMessage()).setUnfinishedMessage(this); + } finally { + this.unknownFields = unknownFields.build(); + makeExtensionsImmutable(); + } + } + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.internal_static_hadoop_common_RequestHeaderProto_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.internal_static_hadoop_common_RequestHeaderProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto.Builder.class); + } + + public static com.google.protobuf.Parser PARSER = + new com.google.protobuf.AbstractParser() { + public RequestHeaderProto parsePartialFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return new RequestHeaderProto(input, extensionRegistry); + } + }; + + @java.lang.Override + public com.google.protobuf.Parser getParserForType() { + return PARSER; + } + + private int bitField0_; + // required string methodName = 1; + public static final int METHODNAME_FIELD_NUMBER = 1; + private java.lang.Object methodName_; + /** + * required string methodName = 1; + * + *
+     ** Name of the RPC method
+     * 
+ */ + public boolean hasMethodName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string methodName = 1; + * + *
+     ** Name of the RPC method
+     * 
+ */ + public java.lang.String getMethodName() { + java.lang.Object ref = methodName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + methodName_ = s; + } + return s; + } + } + /** + * required string methodName = 1; + * + *
+     ** Name of the RPC method
+     * 
+ */ + public com.google.protobuf.ByteString + getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required string declaringClassProtocolName = 2; + public static final int DECLARINGCLASSPROTOCOLNAME_FIELD_NUMBER = 2; + private java.lang.Object declaringClassProtocolName_; + /** + * required string declaringClassProtocolName = 2; + * + *
+     **
+     * RPCs for a particular interface (ie protocol) are done using a
+     * IPC connection that is setup using rpcProxy.
+     * The rpcProxy's has a declared protocol name that is
+     * sent form client to server at connection time.
+     *
+     * Each Rpc call also sends a protocol name
+     * (called declaringClassprotocolName). This name is usually the same
+     * as the connection protocol name except in some cases.
+     * For example metaProtocols such ProtocolInfoProto which get metainfo
+     * about the protocol reuse the connection but need to indicate that
+     * the actual protocol is different (i.e. the protocol is
+     * ProtocolInfoProto) since they reuse the connection; in this case
+     * the declaringClassProtocolName field is set to the ProtocolInfoProto
+     * 
+ */ + public boolean hasDeclaringClassProtocolName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string declaringClassProtocolName = 2; + * + *
+     **
+     * RPCs for a particular interface (ie protocol) are done using a
+     * IPC connection that is setup using rpcProxy.
+     * The rpcProxy's has a declared protocol name that is
+     * sent form client to server at connection time.
+     *
+     * Each Rpc call also sends a protocol name
+     * (called declaringClassprotocolName). This name is usually the same
+     * as the connection protocol name except in some cases.
+     * For example metaProtocols such ProtocolInfoProto which get metainfo
+     * about the protocol reuse the connection but need to indicate that
+     * the actual protocol is different (i.e. the protocol is
+     * ProtocolInfoProto) since they reuse the connection; in this case
+     * the declaringClassProtocolName field is set to the ProtocolInfoProto
+     * 
+ */ + public java.lang.String getDeclaringClassProtocolName() { + java.lang.Object ref = declaringClassProtocolName_; + if (ref instanceof java.lang.String) { + return (java.lang.String) ref; + } else { + com.google.protobuf.ByteString bs = + (com.google.protobuf.ByteString) ref; + java.lang.String s = bs.toStringUtf8(); + if (bs.isValidUtf8()) { + declaringClassProtocolName_ = s; + } + return s; + } + } + /** + * required string declaringClassProtocolName = 2; + * + *
+     **
+     * RPCs for a particular interface (ie protocol) are done using a
+     * IPC connection that is setup using rpcProxy.
+     * The rpcProxy's has a declared protocol name that is
+     * sent form client to server at connection time.
+     *
+     * Each Rpc call also sends a protocol name
+     * (called declaringClassprotocolName). This name is usually the same
+     * as the connection protocol name except in some cases.
+     * For example metaProtocols such ProtocolInfoProto which get metainfo
+     * about the protocol reuse the connection but need to indicate that
+     * the actual protocol is different (i.e. the protocol is
+     * ProtocolInfoProto) since they reuse the connection; in this case
+     * the declaringClassProtocolName field is set to the ProtocolInfoProto
+     * 
+ */ + public com.google.protobuf.ByteString + getDeclaringClassProtocolNameBytes() { + java.lang.Object ref = declaringClassProtocolName_; + if (ref instanceof java.lang.String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + declaringClassProtocolName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + + // required uint64 clientProtocolVersion = 3; + public static final int CLIENTPROTOCOLVERSION_FIELD_NUMBER = 3; + private long clientProtocolVersion_; + /** + * required uint64 clientProtocolVersion = 3; + * + *
+     ** protocol version of class declaring the called method
+     * 
+ */ + public boolean hasClientProtocolVersion() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required uint64 clientProtocolVersion = 3; + * + *
+     ** protocol version of class declaring the called method
+     * 
+ */ + public long getClientProtocolVersion() { + return clientProtocolVersion_; + } + + private void initFields() { + methodName_ = ""; + declaringClassProtocolName_ = ""; + clientProtocolVersion_ = 0L; + } + private byte memoizedIsInitialized = -1; + public final boolean isInitialized() { + byte isInitialized = memoizedIsInitialized; + if (isInitialized != -1) return isInitialized == 1; + + if (!hasMethodName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasDeclaringClassProtocolName()) { + memoizedIsInitialized = 0; + return false; + } + if (!hasClientProtocolVersion()) { + memoizedIsInitialized = 0; + return false; + } + memoizedIsInitialized = 1; + return true; + } + + public void writeTo(com.google.protobuf.CodedOutputStream output) + throws java.io.IOException { + getSerializedSize(); + if (((bitField0_ & 0x00000001) == 0x00000001)) { + output.writeBytes(1, getMethodNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + output.writeBytes(2, getDeclaringClassProtocolNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + output.writeUInt64(3, clientProtocolVersion_); + } + getUnknownFields().writeTo(output); + } + + private int memoizedSerializedSize = -1; + public int getSerializedSize() { + int size = memoizedSerializedSize; + if (size != -1) return size; + + size = 0; + if (((bitField0_ & 0x00000001) == 0x00000001)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(1, getMethodNameBytes()); + } + if (((bitField0_ & 0x00000002) == 0x00000002)) { + size += com.google.protobuf.CodedOutputStream + .computeBytesSize(2, getDeclaringClassProtocolNameBytes()); + } + if (((bitField0_ & 0x00000004) == 0x00000004)) { + size += com.google.protobuf.CodedOutputStream + .computeUInt64Size(3, clientProtocolVersion_); + } + size += getUnknownFields().getSerializedSize(); + memoizedSerializedSize = size; + return size; + } + + private static final long serialVersionUID = 0L; + @java.lang.Override + protected 
java.lang.Object writeReplace() + throws java.io.ObjectStreamException { + return super.writeReplace(); + } + + @java.lang.Override + public boolean equals(final java.lang.Object obj) { + if (obj == this) { + return true; + } + if (!(obj instanceof org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto)) { + return super.equals(obj); + } + org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto other = (org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto) obj; + + boolean result = true; + result = result && (hasMethodName() == other.hasMethodName()); + if (hasMethodName()) { + result = result && getMethodName() + .equals(other.getMethodName()); + } + result = result && (hasDeclaringClassProtocolName() == other.hasDeclaringClassProtocolName()); + if (hasDeclaringClassProtocolName()) { + result = result && getDeclaringClassProtocolName() + .equals(other.getDeclaringClassProtocolName()); + } + result = result && (hasClientProtocolVersion() == other.hasClientProtocolVersion()); + if (hasClientProtocolVersion()) { + result = result && (getClientProtocolVersion() + == other.getClientProtocolVersion()); + } + result = result && + getUnknownFields().equals(other.getUnknownFields()); + return result; + } + + private int memoizedHashCode = 0; + @java.lang.Override + public int hashCode() { + if (memoizedHashCode != 0) { + return memoizedHashCode; + } + int hash = 41; + hash = (19 * hash) + getDescriptorForType().hashCode(); + if (hasMethodName()) { + hash = (37 * hash) + METHODNAME_FIELD_NUMBER; + hash = (53 * hash) + getMethodName().hashCode(); + } + if (hasDeclaringClassProtocolName()) { + hash = (37 * hash) + DECLARINGCLASSPROTOCOLNAME_FIELD_NUMBER; + hash = (53 * hash) + getDeclaringClassProtocolName().hashCode(); + } + if (hasClientProtocolVersion()) { + hash = (37 * hash) + CLIENTPROTOCOLVERSION_FIELD_NUMBER; + hash = (53 * hash) + hashLong(getClientProtocolVersion()); + } + hash = (29 * hash) + 
getUnknownFields().hashCode(); + memoizedHashCode = hash; + return hash; + } + + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseFrom( + com.google.protobuf.ByteString data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseFrom( + com.google.protobuf.ByteString data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseFrom(byte[] data) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseFrom( + byte[] data, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws com.google.protobuf.InvalidProtocolBufferException { + return PARSER.parseFrom(data, extensionRegistry); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseFrom( + java.io.InputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseDelimitedFrom(java.io.InputStream input) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseDelimitedFrom( + java.io.InputStream input, + 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseDelimitedFrom(input, extensionRegistry); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseFrom( + com.google.protobuf.CodedInputStream input) + throws java.io.IOException { + return PARSER.parseFrom(input); + } + public static org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parseFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + return PARSER.parseFrom(input, extensionRegistry); + } + + public static Builder newBuilder() { return Builder.create(); } + public Builder newBuilderForType() { return newBuilder(); } + public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto prototype) { + return newBuilder().mergeFrom(prototype); + } + public Builder toBuilder() { return newBuilder(this); } + + @java.lang.Override + protected Builder newBuilderForType( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + Builder builder = new Builder(parent); + return builder; + } + /** + * Protobuf type {@code hadoop.common.RequestHeaderProto} + * + *
+     **
+     * This message is the header for the Protobuf Rpc Engine
+     * when sending a RPC request from  RPC client to the RPC server.
+     * The actual request (serialized as protobuf) follows this request.
+     *
+     * No special header is needed for the Rpc Response for Protobuf Rpc Engine.
+     * The normal RPC response header (see RpcHeader.proto) are sufficient.
+     * 
+ */ + public static final class Builder extends + com.google.protobuf.GeneratedMessage.Builder + implements org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProtoOrBuilder { + public static final com.google.protobuf.Descriptors.Descriptor + getDescriptor() { + return org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.internal_static_hadoop_common_RequestHeaderProto_descriptor; + } + + protected com.google.protobuf.GeneratedMessage.FieldAccessorTable + internalGetFieldAccessorTable() { + return org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.internal_static_hadoop_common_RequestHeaderProto_fieldAccessorTable + .ensureFieldAccessorsInitialized( + org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto.class, org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto.Builder.class); + } + + // Construct using org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto.newBuilder() + private Builder() { + maybeForceBuilderInitialization(); + } + + private Builder( + com.google.protobuf.GeneratedMessage.BuilderParent parent) { + super(parent); + maybeForceBuilderInitialization(); + } + private void maybeForceBuilderInitialization() { + if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) { + } + } + private static Builder create() { + return new Builder(); + } + + public Builder clear() { + super.clear(); + methodName_ = ""; + bitField0_ = (bitField0_ & ~0x00000001); + declaringClassProtocolName_ = ""; + bitField0_ = (bitField0_ & ~0x00000002); + clientProtocolVersion_ = 0L; + bitField0_ = (bitField0_ & ~0x00000004); + return this; + } + + public Builder clone() { + return create().mergeFrom(buildPartial()); + } + + public com.google.protobuf.Descriptors.Descriptor + getDescriptorForType() { + return org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.internal_static_hadoop_common_RequestHeaderProto_descriptor; + } + + public 
org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto getDefaultInstanceForType() { + return org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto.getDefaultInstance(); + } + + public org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto build() { + org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto result = buildPartial(); + if (!result.isInitialized()) { + throw newUninitializedMessageException(result); + } + return result; + } + + public org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto buildPartial() { + org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto result = new org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto(this); + int from_bitField0_ = bitField0_; + int to_bitField0_ = 0; + if (((from_bitField0_ & 0x00000001) == 0x00000001)) { + to_bitField0_ |= 0x00000001; + } + result.methodName_ = methodName_; + if (((from_bitField0_ & 0x00000002) == 0x00000002)) { + to_bitField0_ |= 0x00000002; + } + result.declaringClassProtocolName_ = declaringClassProtocolName_; + if (((from_bitField0_ & 0x00000004) == 0x00000004)) { + to_bitField0_ |= 0x00000004; + } + result.clientProtocolVersion_ = clientProtocolVersion_; + result.bitField0_ = to_bitField0_; + onBuilt(); + return result; + } + + public Builder mergeFrom(com.google.protobuf.Message other) { + if (other instanceof org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto) { + return mergeFrom((org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto)other); + } else { + super.mergeFrom(other); + return this; + } + } + + public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto other) { + if (other == org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto.getDefaultInstance()) return this; + if (other.hasMethodName()) { + bitField0_ |= 0x00000001; + methodName_ = 
other.methodName_; + onChanged(); + } + if (other.hasDeclaringClassProtocolName()) { + bitField0_ |= 0x00000002; + declaringClassProtocolName_ = other.declaringClassProtocolName_; + onChanged(); + } + if (other.hasClientProtocolVersion()) { + setClientProtocolVersion(other.getClientProtocolVersion()); + } + this.mergeUnknownFields(other.getUnknownFields()); + return this; + } + + public final boolean isInitialized() { + if (!hasMethodName()) { + + return false; + } + if (!hasDeclaringClassProtocolName()) { + + return false; + } + if (!hasClientProtocolVersion()) { + + return false; + } + return true; + } + + public Builder mergeFrom( + com.google.protobuf.CodedInputStream input, + com.google.protobuf.ExtensionRegistryLite extensionRegistry) + throws java.io.IOException { + org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto parsedMessage = null; + try { + parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry); + } catch (com.google.protobuf.InvalidProtocolBufferException e) { + parsedMessage = (org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto) e.getUnfinishedMessage(); + throw e; + } finally { + if (parsedMessage != null) { + mergeFrom(parsedMessage); + } + } + return this; + } + private int bitField0_; + + // required string methodName = 1; + private java.lang.Object methodName_ = ""; + /** + * required string methodName = 1; + * + *
+       ** Name of the RPC method
+       * 
+ */ + public boolean hasMethodName() { + return ((bitField0_ & 0x00000001) == 0x00000001); + } + /** + * required string methodName = 1; + * + *
+       ** Name of the RPC method
+       * 
+ */ + public java.lang.String getMethodName() { + java.lang.Object ref = methodName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + methodName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string methodName = 1; + * + *
+       ** Name of the RPC method
+       * 
+ */ + public com.google.protobuf.ByteString + getMethodNameBytes() { + java.lang.Object ref = methodName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + methodName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string methodName = 1; + * + *
+       ** Name of the RPC method
+       * 
+ */ + public Builder setMethodName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + methodName_ = value; + onChanged(); + return this; + } + /** + * required string methodName = 1; + * + *
+       ** Name of the RPC method
+       * 
+ */ + public Builder clearMethodName() { + bitField0_ = (bitField0_ & ~0x00000001); + methodName_ = getDefaultInstance().getMethodName(); + onChanged(); + return this; + } + /** + * required string methodName = 1; + * + *
+       ** Name of the RPC method
+       * 
+ */ + public Builder setMethodNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000001; + methodName_ = value; + onChanged(); + return this; + } + + // required string declaringClassProtocolName = 2; + private java.lang.Object declaringClassProtocolName_ = ""; + /** + * required string declaringClassProtocolName = 2; + * + *
+       **
+       * RPCs for a particular interface (ie protocol) are done using an
+       * IPC connection that is setup using rpcProxy.
+       * The rpcProxy has a declared protocol name that is
+       * sent from client to server at connection time.
+       *
+       * Each Rpc call also sends a protocol name
+       * (called declaringClassprotocolName). This name is usually the same
+       * as the connection protocol name except in some cases.
+       * For example metaProtocols such as ProtocolInfoProto which get metainfo
+       * about the protocol reuse the connection but need to indicate that
+       * the actual protocol is different (i.e. the protocol is
+       * ProtocolInfoProto) since they reuse the connection; in this case
+       * the declaringClassProtocolName field is set to the ProtocolInfoProto
+       * 
+ */ + public boolean hasDeclaringClassProtocolName() { + return ((bitField0_ & 0x00000002) == 0x00000002); + } + /** + * required string declaringClassProtocolName = 2; + * + *
+       **
+       * RPCs for a particular interface (ie protocol) are done using an
+       * IPC connection that is setup using rpcProxy.
+       * The rpcProxy has a declared protocol name that is
+       * sent from client to server at connection time.
+       *
+       * Each Rpc call also sends a protocol name
+       * (called declaringClassprotocolName). This name is usually the same
+       * as the connection protocol name except in some cases.
+       * For example metaProtocols such as ProtocolInfoProto which get metainfo
+       * about the protocol reuse the connection but need to indicate that
+       * the actual protocol is different (i.e. the protocol is
+       * ProtocolInfoProto) since they reuse the connection; in this case
+       * the declaringClassProtocolName field is set to the ProtocolInfoProto
+       * 
+ */ + public java.lang.String getDeclaringClassProtocolName() { + java.lang.Object ref = declaringClassProtocolName_; + if (!(ref instanceof java.lang.String)) { + java.lang.String s = ((com.google.protobuf.ByteString) ref) + .toStringUtf8(); + declaringClassProtocolName_ = s; + return s; + } else { + return (java.lang.String) ref; + } + } + /** + * required string declaringClassProtocolName = 2; + * + *
+       **
+       * RPCs for a particular interface (ie protocol) are done using an
+       * IPC connection that is setup using rpcProxy.
+       * The rpcProxy has a declared protocol name that is
+       * sent from client to server at connection time.
+       *
+       * Each Rpc call also sends a protocol name
+       * (called declaringClassprotocolName). This name is usually the same
+       * as the connection protocol name except in some cases.
+       * For example metaProtocols such as ProtocolInfoProto which get metainfo
+       * about the protocol reuse the connection but need to indicate that
+       * the actual protocol is different (i.e. the protocol is
+       * ProtocolInfoProto) since they reuse the connection; in this case
+       * the declaringClassProtocolName field is set to the ProtocolInfoProto
+       * 
+ */ + public com.google.protobuf.ByteString + getDeclaringClassProtocolNameBytes() { + java.lang.Object ref = declaringClassProtocolName_; + if (ref instanceof String) { + com.google.protobuf.ByteString b = + com.google.protobuf.ByteString.copyFromUtf8( + (java.lang.String) ref); + declaringClassProtocolName_ = b; + return b; + } else { + return (com.google.protobuf.ByteString) ref; + } + } + /** + * required string declaringClassProtocolName = 2; + * + *
+       **
+       * RPCs for a particular interface (ie protocol) are done using an
+       * IPC connection that is setup using rpcProxy.
+       * The rpcProxy has a declared protocol name that is
+       * sent from client to server at connection time.
+       *
+       * Each Rpc call also sends a protocol name
+       * (called declaringClassprotocolName). This name is usually the same
+       * as the connection protocol name except in some cases.
+       * For example metaProtocols such as ProtocolInfoProto which get metainfo
+       * about the protocol reuse the connection but need to indicate that
+       * the actual protocol is different (i.e. the protocol is
+       * ProtocolInfoProto) since they reuse the connection; in this case
+       * the declaringClassProtocolName field is set to the ProtocolInfoProto
+       * 
+ */ + public Builder setDeclaringClassProtocolName( + java.lang.String value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + declaringClassProtocolName_ = value; + onChanged(); + return this; + } + /** + * required string declaringClassProtocolName = 2; + * + *
+       **
+       * RPCs for a particular interface (ie protocol) are done using an
+       * IPC connection that is setup using rpcProxy.
+       * The rpcProxy has a declared protocol name that is
+       * sent from client to server at connection time.
+       *
+       * Each Rpc call also sends a protocol name
+       * (called declaringClassprotocolName). This name is usually the same
+       * as the connection protocol name except in some cases.
+       * For example metaProtocols such as ProtocolInfoProto which get metainfo
+       * about the protocol reuse the connection but need to indicate that
+       * the actual protocol is different (i.e. the protocol is
+       * ProtocolInfoProto) since they reuse the connection; in this case
+       * the declaringClassProtocolName field is set to the ProtocolInfoProto
+       * 
+ */ + public Builder clearDeclaringClassProtocolName() { + bitField0_ = (bitField0_ & ~0x00000002); + declaringClassProtocolName_ = getDefaultInstance().getDeclaringClassProtocolName(); + onChanged(); + return this; + } + /** + * required string declaringClassProtocolName = 2; + * + *
+       **
+       * RPCs for a particular interface (ie protocol) are done using an
+       * IPC connection that is setup using rpcProxy.
+       * The rpcProxy has a declared protocol name that is
+       * sent from client to server at connection time.
+       *
+       * Each Rpc call also sends a protocol name
+       * (called declaringClassprotocolName). This name is usually the same
+       * as the connection protocol name except in some cases.
+       * For example metaProtocols such as ProtocolInfoProto which get metainfo
+       * about the protocol reuse the connection but need to indicate that
+       * the actual protocol is different (i.e. the protocol is
+       * ProtocolInfoProto) since they reuse the connection; in this case
+       * the declaringClassProtocolName field is set to the ProtocolInfoProto
+       * 
+ */ + public Builder setDeclaringClassProtocolNameBytes( + com.google.protobuf.ByteString value) { + if (value == null) { + throw new NullPointerException(); + } + bitField0_ |= 0x00000002; + declaringClassProtocolName_ = value; + onChanged(); + return this; + } + + // required uint64 clientProtocolVersion = 3; + private long clientProtocolVersion_ ; + /** + * required uint64 clientProtocolVersion = 3; + * + *
+       ** protocol version of class declaring the called method
+       * 
+ */ + public boolean hasClientProtocolVersion() { + return ((bitField0_ & 0x00000004) == 0x00000004); + } + /** + * required uint64 clientProtocolVersion = 3; + * + *
+       ** protocol version of class declaring the called method
+       * 
+ */ + public long getClientProtocolVersion() { + return clientProtocolVersion_; + } + /** + * required uint64 clientProtocolVersion = 3; + * + *
+       ** protocol version of class declaring the called method
+       * 
+ */ + public Builder setClientProtocolVersion(long value) { + bitField0_ |= 0x00000004; + clientProtocolVersion_ = value; + onChanged(); + return this; + } + /** + * required uint64 clientProtocolVersion = 3; + * + *
+       ** protocol version of class declaring the called method
+       * 
+ */ + public Builder clearClientProtocolVersion() { + bitField0_ = (bitField0_ & ~0x00000004); + clientProtocolVersion_ = 0L; + onChanged(); + return this; + } + + // @@protoc_insertion_point(builder_scope:hadoop.common.RequestHeaderProto) + } + + static { + defaultInstance = new RequestHeaderProto(true); + defaultInstance.initFields(); + } + + // @@protoc_insertion_point(class_scope:hadoop.common.RequestHeaderProto) + } + + private static com.google.protobuf.Descriptors.Descriptor + internal_static_hadoop_common_RequestHeaderProto_descriptor; + private static + com.google.protobuf.GeneratedMessage.FieldAccessorTable + internal_static_hadoop_common_RequestHeaderProto_fieldAccessorTable; + + public static com.google.protobuf.Descriptors.FileDescriptor + getDescriptor() { + return descriptor; + } + private static com.google.protobuf.Descriptors.FileDescriptor + descriptor; + static { + java.lang.String[] descriptorData = { + "\n\027ProtobufRpcEngine.proto\022\rhadoop.common" + + "\"k\n\022RequestHeaderProto\022\022\n\nmethodName\030\001 \002" + + "(\t\022\"\n\032declaringClassProtocolName\030\002 \002(\t\022\035" + + "\n\025clientProtocolVersion\030\003 \002(\004B<\n\036org.apa" + + "che.hadoop.ipc.protobufB\027ProtobufRpcEngi" + + "neProtos\240\001\001" + }; + com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner = + new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() { + public com.google.protobuf.ExtensionRegistry assignDescriptors( + com.google.protobuf.Descriptors.FileDescriptor root) { + descriptor = root; + internal_static_hadoop_common_RequestHeaderProto_descriptor = + getDescriptor().getMessageTypes().get(0); + internal_static_hadoop_common_RequestHeaderProto_fieldAccessorTable = new + com.google.protobuf.GeneratedMessage.FieldAccessorTable( + internal_static_hadoop_common_RequestHeaderProto_descriptor, + new java.lang.String[] { "MethodName", "DeclaringClassProtocolName", "ClientProtocolVersion", }); 
+ return null; + } + }; + com.google.protobuf.Descriptors.FileDescriptor + .internalBuildGeneratedFileFrom(descriptorData, + new com.google.protobuf.Descriptors.FileDescriptor[] { + }, assigner); + } + + // @@protoc_insertion_point(outer_class_scope) +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java index 86dd91ee142..1e5b27a6fea 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFCRpcServer.java @@ -27,7 +27,7 @@ import org.apache.hadoop.fs.CommonConfigurationKeys; import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.ZKFCProtocolService; import org.apache.hadoop.ha.protocolPB.ZKFCProtocolPB; import org.apache.hadoop.ha.protocolPB.ZKFCProtocolServerSideTranslatorPB; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC.Server; import org.apache.hadoop.security.AccessControlException; @@ -50,7 +50,7 @@ public class ZKFCRpcServer implements ZKFCProtocol { this.zkfc = zkfc; RPC.setProtocolEngine(conf, ZKFCProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); ZKFCProtocolServerSideTranslatorPB translator = new ZKFCProtocolServerSideTranslatorPB(this); BlockingService service = ZKFCProtocolService diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java index e53820cd131..2cbfd0d0ec0 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java +++ 
b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/HAServiceProtocolClientSideTranslatorPB.java @@ -38,7 +38,7 @@ import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequ import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto; import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToObserverRequestProto; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.UserGroupInformation; @@ -67,7 +67,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements public HAServiceProtocolClientSideTranslatorPB(InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, HAServiceProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); rpcProxy = RPC.getProxy(HAServiceProtocolPB.class, RPC.getProtocolVersion(HAServiceProtocolPB.class), addr, conf); } @@ -76,7 +76,7 @@ public class HAServiceProtocolClientSideTranslatorPB implements InetSocketAddress addr, Configuration conf, SocketFactory socketFactory, int timeout) throws IOException { RPC.setProtocolEngine(conf, HAServiceProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); rpcProxy = RPC.getProxy(HAServiceProtocolPB.class, RPC.getProtocolVersion(HAServiceProtocolPB.class), addr, UserGroupInformation.getCurrentUser(), conf, socketFactory, timeout); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java index 7001d93995f..3777207c7e4 100644 --- 
a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/protocolPB/ZKFCProtocolClientSideTranslatorPB.java @@ -28,7 +28,7 @@ import org.apache.hadoop.ha.ZKFCProtocol; import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto; import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.AccessControlException; @@ -48,7 +48,7 @@ public class ZKFCProtocolClientSideTranslatorPB implements InetSocketAddress addr, Configuration conf, SocketFactory socketFactory, int timeout) throws IOException { RPC.setProtocolEngine(conf, ZKFCProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); rpcProxy = RPC.getProxy(ZKFCProtocolPB.class, RPC.getProtocolVersion(ZKFCProtocolPB.class), addr, UserGroupInformation.getCurrentUser(), conf, socketFactory, timeout); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java index bb86cfc35bf..1e110b90113 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java @@ -53,6 +53,23 @@ public class ProtobufHelper { return e instanceof IOException ? (IOException) e : new IOException(se); } + /** + * Kept for backward compatible. + * Return the IOException thrown by the remote server wrapped in + * ServiceException as cause. 
+ * @param se ServiceException that wraps IO exception thrown by the server + * @return Exception wrapped in ServiceException or + * a new IOException that wraps the unexpected ServiceException. + */ + @Deprecated + public static IOException getRemoteException( + com.google.protobuf.ServiceException se) { + Throwable e = se.getCause(); + if (e == null) { + return new IOException(se); + } + return e instanceof IOException ? (IOException) e : new IOException(se); + } /** * Map used to cache fixed strings to ByteStrings. Since there is no diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java index 14b356f847a..220ad1ded9f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java @@ -19,8 +19,11 @@ package org.apache.hadoop.ipc; import com.google.common.annotations.VisibleForTesting; -import org.apache.hadoop.thirdparty.protobuf.*; -import org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor; +import com.google.protobuf.BlockingService; +import com.google.protobuf.Descriptors.MethodDescriptor; +import com.google.protobuf.Message; +import com.google.protobuf.ServiceException; +import com.google.protobuf.TextFormat; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability.Unstable; @@ -29,6 +32,7 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.ipc.Client.ConnectionId; import org.apache.hadoop.ipc.RPC.RpcInvoker; +import org.apache.hadoop.ipc.RPC.RpcKind; import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngineProtos.RequestHeaderProto; import 
org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.token.SecretManager; @@ -52,7 +56,10 @@ import java.util.concurrent.atomic.AtomicBoolean; /** * RPC Engine for for protobuf based RPCs. + * This engine uses Protobuf 2.5.0. Recommended to upgrade to Protobuf 3.x + * from hadoop-thirdparty and use ProtobufRpcEngine2. */ +@Deprecated @InterfaceStability.Evolving public class ProtobufRpcEngine implements RpcEngine { public static final Logger LOG = @@ -355,6 +362,7 @@ public class ProtobufRpcEngine implements RpcEngine { new ThreadLocal<>(); static final ThreadLocal currentCallInfo = new ThreadLocal<>(); + private static final RpcInvoker RPC_INVOKER = new ProtoBufRpcInvoker(); static class CallInfo { private final RPC.Server server; @@ -433,7 +441,15 @@ public class ProtobufRpcEngine implements RpcEngine { registerProtocolAndImpl(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocolClass, protocolImpl); } - + + @Override + protected RpcInvoker getServerRpcInvoker(RpcKind rpcKind) { + if (rpcKind == RpcKind.RPC_PROTOCOL_BUFFER) { + return RPC_INVOKER; + } + return super.getServerRpcInvoker(rpcKind); + } + /** * Protobuf invoker for {@link RpcInvoker} */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java new file mode 100644 index 00000000000..30315343962 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java @@ -0,0 +1,598 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package org.apache.hadoop.ipc; + +import com.google.common.annotations.VisibleForTesting; +import org.apache.hadoop.thirdparty.protobuf.*; +import org.apache.hadoop.thirdparty.protobuf.Descriptors.MethodDescriptor; +import org.apache.hadoop.classification.InterfaceAudience; +import org.apache.hadoop.classification.InterfaceStability; +import org.apache.hadoop.classification.InterfaceStability.Unstable; +import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.io.Writable; +import org.apache.hadoop.io.retry.RetryPolicy; +import org.apache.hadoop.ipc.Client.ConnectionId; +import org.apache.hadoop.ipc.RPC.RpcInvoker; +import org.apache.hadoop.ipc.protobuf.ProtobufRpcEngine2Protos.RequestHeaderProto; +import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.security.token.SecretManager; +import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.util.Time; +import org.apache.hadoop.util.concurrent.AsyncGet; +import org.apache.htrace.core.TraceScope; +import org.apache.htrace.core.Tracer; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.net.SocketFactory; +import java.io.IOException; +import java.lang.reflect.Method; +import java.lang.reflect.Proxy; +import java.net.InetSocketAddress; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * RPC Engine for for protobuf based RPCs. 
+ */ +@InterfaceStability.Evolving +public class ProtobufRpcEngine2 implements RpcEngine { + public static final Logger LOG = + LoggerFactory.getLogger(ProtobufRpcEngine2.class); + private static final ThreadLocal> + ASYNC_RETURN_MESSAGE = new ThreadLocal<>(); + + static { // Register the rpcRequest deserializer for ProtobufRpcEngine + org.apache.hadoop.ipc.Server.registerProtocolEngine( + RPC.RpcKind.RPC_PROTOCOL_BUFFER, RpcProtobufRequest.class, + new Server.ProtoBufRpcInvoker()); + } + + private static final ClientCache CLIENTS = new ClientCache(); + + @Unstable + public static AsyncGet getAsyncReturnMessage() { + return ASYNC_RETURN_MESSAGE.get(); + } + + public ProtocolProxy getProxy(Class protocol, long clientVersion, + InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, + SocketFactory factory, int rpcTimeout) throws IOException { + return getProxy(protocol, clientVersion, addr, ticket, conf, factory, + rpcTimeout, null); + } + + @Override + public ProtocolProxy getProxy( + Class protocol, long clientVersion, + InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, + SocketFactory factory, int rpcTimeout, RetryPolicy connectionRetryPolicy) + throws IOException { + return getProxy(protocol, clientVersion, addr, ticket, conf, factory, + rpcTimeout, connectionRetryPolicy, null, null); + } + + @Override + @SuppressWarnings("unchecked") + public ProtocolProxy getProxy(Class protocol, long clientVersion, + InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, + SocketFactory factory, int rpcTimeout, RetryPolicy connectionRetryPolicy, + AtomicBoolean fallbackToSimpleAuth, AlignmentContext alignmentContext) + throws IOException { + + final Invoker invoker = new Invoker(protocol, addr, ticket, conf, factory, + rpcTimeout, connectionRetryPolicy, fallbackToSimpleAuth, + alignmentContext); + return new ProtocolProxy(protocol, (T) Proxy.newProxyInstance( + protocol.getClassLoader(), new Class[]{protocol}, 
invoker), false); + } + + @Override + public ProtocolProxy getProtocolMetaInfoProxy( + ConnectionId connId, Configuration conf, SocketFactory factory) + throws IOException { + Class protocol = ProtocolMetaInfoPB.class; + return new ProtocolProxy(protocol, + (ProtocolMetaInfoPB) Proxy.newProxyInstance(protocol.getClassLoader(), + new Class[]{protocol}, new Invoker(protocol, connId, conf, + factory)), false); + } + + private static final class Invoker implements RpcInvocationHandler { + private final Map returnTypes = + new ConcurrentHashMap(); + private boolean isClosed = false; + private final Client.ConnectionId remoteId; + private final Client client; + private final long clientProtocolVersion; + private final String protocolName; + private AtomicBoolean fallbackToSimpleAuth; + private AlignmentContext alignmentContext; + + private Invoker(Class protocol, InetSocketAddress addr, + UserGroupInformation ticket, Configuration conf, SocketFactory factory, + int rpcTimeout, RetryPolicy connectionRetryPolicy, + AtomicBoolean fallbackToSimpleAuth, AlignmentContext alignmentContext) + throws IOException { + this(protocol, Client.ConnectionId.getConnectionId( + addr, protocol, ticket, rpcTimeout, connectionRetryPolicy, conf), + conf, factory); + this.fallbackToSimpleAuth = fallbackToSimpleAuth; + this.alignmentContext = alignmentContext; + } + + /** + * This constructor takes a connectionId, instead of creating a new one. 
+ */ + private Invoker(Class protocol, Client.ConnectionId connId, + Configuration conf, SocketFactory factory) { + this.remoteId = connId; + this.client = CLIENTS.getClient(conf, factory, RpcWritable.Buffer.class); + this.protocolName = RPC.getProtocolName(protocol); + this.clientProtocolVersion = RPC + .getProtocolVersion(protocol); + } + + private RequestHeaderProto constructRpcRequestHeader(Method method) { + RequestHeaderProto.Builder builder = RequestHeaderProto + .newBuilder(); + builder.setMethodName(method.getName()); + + + // For protobuf, {@code protocol} used when creating client side proxy is + // the interface extending BlockingInterface, which has the annotations + // such as ProtocolName etc. + // + // Using Method.getDeclaringClass(), as in WritableEngine to get at + // the protocol interface will return BlockingInterface, from where + // the annotation ProtocolName and Version cannot be + // obtained. + // + // Hence we simply use the protocol class used to create the proxy. + // For PB this may limit the use of mixins on client side. + builder.setDeclaringClassProtocolName(protocolName); + builder.setClientProtocolVersion(clientProtocolVersion); + return builder.build(); + } + + /** + * This is the client side invoker of RPC method. It only throws + * ServiceException, since the invocation proxy expects only + * ServiceException to be thrown by the method in case protobuf service. + * + * ServiceException has the following causes: + *
    + *
  1. Exceptions encountered on the client side in this method are + * set as cause in ServiceException as is.
  2. + *
  3. Exceptions from the server are wrapped in RemoteException and are + * set as cause in ServiceException
  4. + *
+ * + * Note that the client calling protobuf RPC methods, must handle + * ServiceException by getting the cause from the ServiceException. If the + * cause is RemoteException, then unwrap it to get the exception thrown by + * the server. + */ + @Override + public Message invoke(Object proxy, final Method method, Object[] args) + throws ServiceException { + long startTime = 0; + if (LOG.isDebugEnabled()) { + startTime = Time.now(); + } + + if (args.length != 2) { // RpcController + Message + throw new ServiceException( + "Too many or few parameters for request. Method: [" + + method.getName() + "]" + ", Expected: 2, Actual: " + + args.length); + } + if (args[1] == null) { + throw new ServiceException("null param while calling Method: [" + + method.getName() + "]"); + } + + // if Tracing is on then start a new span for this rpc. + // guard it in the if statement to make sure there isn't + // any extra string manipulation. + Tracer tracer = Tracer.curThreadTracer(); + TraceScope traceScope = null; + if (tracer != null) { + traceScope = tracer.newScope(RpcClientUtil.methodToTraceString(method)); + } + + RequestHeaderProto rpcRequestHeader = constructRpcRequestHeader(method); + + if (LOG.isTraceEnabled()) { + LOG.trace(Thread.currentThread().getId() + ": Call -> " + + remoteId + ": " + method.getName() + + " {" + TextFormat.shortDebugString((Message) args[1]) + "}"); + } + + + final Message theRequest = (Message) args[1]; + final RpcWritable.Buffer val; + try { + val = (RpcWritable.Buffer) client.call(RPC.RpcKind.RPC_PROTOCOL_BUFFER, + new RpcProtobufRequest(rpcRequestHeader, theRequest), remoteId, + fallbackToSimpleAuth, alignmentContext); + + } catch (Throwable e) { + if (LOG.isTraceEnabled()) { + LOG.trace(Thread.currentThread().getId() + ": Exception <- " + + remoteId + ": " + method.getName() + + " {" + e + "}"); + } + if (traceScope != null) { + traceScope.addTimelineAnnotation("Call got exception: " + + e.toString()); + } + throw new ServiceException(e); + } 
finally { + if (traceScope != null) { + traceScope.close(); + } + } + + if (LOG.isDebugEnabled()) { + long callTime = Time.now() - startTime; + LOG.debug("Call: " + method.getName() + " took " + callTime + "ms"); + } + + if (Client.isAsynchronousMode()) { + final AsyncGet arr + = Client.getAsyncRpcResponse(); + final AsyncGet asyncGet = + new AsyncGet() { + @Override + public Message get(long timeout, TimeUnit unit) throws Exception { + return getReturnMessage(method, arr.get(timeout, unit)); + } + + @Override + public boolean isDone() { + return arr.isDone(); + } + }; + ASYNC_RETURN_MESSAGE.set(asyncGet); + return null; + } else { + return getReturnMessage(method, val); + } + } + + private Message getReturnMessage(final Method method, + final RpcWritable.Buffer buf) throws ServiceException { + Message prototype = null; + try { + prototype = getReturnProtoType(method); + } catch (Exception e) { + throw new ServiceException(e); + } + Message returnMessage; + try { + returnMessage = buf.getValue(prototype.getDefaultInstanceForType()); + + if (LOG.isTraceEnabled()) { + LOG.trace(Thread.currentThread().getId() + ": Response <- " + + remoteId + ": " + method.getName() + + " {" + TextFormat.shortDebugString(returnMessage) + "}"); + } + + } catch (Throwable e) { + throw new ServiceException(e); + } + return returnMessage; + } + + @Override + public void close() throws IOException { + if (!isClosed) { + isClosed = true; + CLIENTS.stopClient(client); + } + } + + private Message getReturnProtoType(Method method) throws Exception { + if (returnTypes.containsKey(method.getName())) { + return returnTypes.get(method.getName()); + } + + Class returnType = method.getReturnType(); + Method newInstMethod = returnType.getMethod("getDefaultInstance"); + newInstMethod.setAccessible(true); + Message prototype = (Message) newInstMethod.invoke(null, (Object[]) null); + returnTypes.put(method.getName(), prototype); + return prototype; + } + + @Override //RpcInvocationHandler + public 
ConnectionId getConnectionId() { + return remoteId; + } + } + + @VisibleForTesting + @InterfaceAudience.Private + @InterfaceStability.Unstable + static Client getClient(Configuration conf) { + return CLIENTS.getClient(conf, SocketFactory.getDefault(), + RpcWritable.Buffer.class); + } + + + + @Override + public RPC.Server getServer(Class protocol, Object protocolImpl, + String bindAddress, int port, int numHandlers, int numReaders, + int queueSizePerHandler, boolean verbose, Configuration conf, + SecretManager secretManager, + String portRangeConfig, AlignmentContext alignmentContext) + throws IOException { + return new Server(protocol, protocolImpl, conf, bindAddress, port, + numHandlers, numReaders, queueSizePerHandler, verbose, secretManager, + portRangeConfig, alignmentContext); + } + + public static class Server extends RPC.Server { + + static final ThreadLocal CURRENT_CALLBACK = + new ThreadLocal<>(); + + static final ThreadLocal CURRENT_CALL_INFO = new ThreadLocal<>(); + + static class CallInfo { + private final RPC.Server server; + private final String methodName; + + CallInfo(RPC.Server server, String methodName) { + this.server = server; + this.methodName = methodName; + } + } + + static class ProtobufRpcEngineCallbackImpl + implements ProtobufRpcEngineCallback2 { + + private final RPC.Server server; + private final Call call; + private final String methodName; + private final long setupTime; + + ProtobufRpcEngineCallbackImpl() { + this.server = CURRENT_CALL_INFO.get().server; + this.call = Server.getCurCall().get(); + this.methodName = CURRENT_CALL_INFO.get().methodName; + this.setupTime = Time.now(); + } + + @Override + public void setResponse(Message message) { + long processingTime = Time.now() - setupTime; + call.setDeferredResponse(RpcWritable.wrap(message)); + server.updateDeferredMetrics(methodName, processingTime); + } + + @Override + public void error(Throwable t) { + long processingTime = Time.now() - setupTime; + String detailedMetricsName = 
t.getClass().getSimpleName(); + server.updateDeferredMetrics(detailedMetricsName, processingTime); + call.setDeferredError(t); + } + } + + @InterfaceStability.Unstable + public static ProtobufRpcEngineCallback2 registerForDeferredResponse() { + ProtobufRpcEngineCallback2 callback = new ProtobufRpcEngineCallbackImpl(); + CURRENT_CALLBACK.set(callback); + return callback; + } + + /** + * Construct an RPC server. + * + * @param protocolClass the class of protocol + * @param protocolImpl the protocolImpl whose methods will be called + * @param conf the configuration to use + * @param bindAddress the address to bind on to listen for connection + * @param port the port to listen for connections on + * @param numHandlers the number of method handler threads to run + * @param verbose whether each call should be logged + * @param portRangeConfig A config parameter that can be used to restrict + * the range of ports used when port is 0 (an ephemeral port) + * @param alignmentContext provides server state info on client responses + */ + public Server(Class protocolClass, Object protocolImpl, + Configuration conf, String bindAddress, int port, int numHandlers, + int numReaders, int queueSizePerHandler, boolean verbose, + SecretManager secretManager, + String portRangeConfig, AlignmentContext alignmentContext) + throws IOException { + super(bindAddress, port, null, numHandlers, + numReaders, queueSizePerHandler, conf, + serverNameFromClass(protocolImpl.getClass()), secretManager, + portRangeConfig); + setAlignmentContext(alignmentContext); + this.verbose = verbose; + registerProtocolAndImpl(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocolClass, + protocolImpl); + } + + /** + * Protobuf invoker for {@link RpcInvoker}. 
+ */ + static class ProtoBufRpcInvoker implements RpcInvoker { + private static ProtoClassProtoImpl getProtocolImpl(RPC.Server server, + String protoName, long clientVersion) throws RpcServerException { + ProtoNameVer pv = new ProtoNameVer(protoName, clientVersion); + ProtoClassProtoImpl impl = + server.getProtocolImplMap(RPC.RpcKind.RPC_PROTOCOL_BUFFER).get(pv); + if (impl == null) { // no match for Protocol AND Version + VerProtocolImpl highest = server.getHighestSupportedProtocol( + RPC.RpcKind.RPC_PROTOCOL_BUFFER, protoName); + if (highest == null) { + throw new RpcNoSuchProtocolException( + "Unknown protocol: " + protoName); + } + // protocol supported but not the version that client wants + throw new RPC.VersionMismatch(protoName, clientVersion, + highest.version); + } + return impl; + } + + @Override + /** + * This is a server side method, which is invoked over RPC. On success + * the return response has protobuf response payload. On failure, the + * exception name and the stack trace are returned in the response. + * See {@link HadoopRpcResponseProto} + * + * In this method there three types of exceptions possible and they are + * returned in response as follows. + *
    + *
+ * <ol>
+ * <li>Exceptions encountered in this method that are returned
+ * as {@link RpcServerException}</li>
+ * <li>Exceptions thrown by the service are wrapped in ServiceException.
+ * In that case this method returns in the response the exception thrown
+ * by the service.</li>
+ * <li>Other exceptions thrown by the service. They are returned
+ * as is.</li>
+ * </ol>
+ */ + public Writable call(RPC.Server server, String connectionProtocolName, + Writable writableRequest, long receiveTime) throws Exception { + RpcProtobufRequest request = (RpcProtobufRequest) writableRequest; + RequestHeaderProto rpcRequest = request.getRequestHeader(); + String methodName = rpcRequest.getMethodName(); + + /** + * RPCs for a particular interface (ie protocol) are done using a + * IPC connection that is setup using rpcProxy. + * The rpcProxy's has a declared protocol name that is + * sent form client to server at connection time. + * + * Each Rpc call also sends a protocol name + * (called declaringClassprotocolName). This name is usually the same + * as the connection protocol name except in some cases. + * For example metaProtocols such ProtocolInfoProto which get info + * about the protocol reuse the connection but need to indicate that + * the actual protocol is different (i.e. the protocol is + * ProtocolInfoProto) since they reuse the connection; in this case + * the declaringClassProtocolName field is set to the ProtocolInfoProto. 
+ */ + + String declaringClassProtoName = + rpcRequest.getDeclaringClassProtocolName(); + long clientVersion = rpcRequest.getClientProtocolVersion(); + if (server.verbose) { + LOG.info("Call: connectionProtocolName=" + connectionProtocolName + + ", method=" + methodName); + } + + ProtoClassProtoImpl protocolImpl = getProtocolImpl(server, + declaringClassProtoName, clientVersion); + BlockingService service = (BlockingService) protocolImpl.protocolImpl; + MethodDescriptor methodDescriptor = service.getDescriptorForType() + .findMethodByName(methodName); + if (methodDescriptor == null) { + String msg = "Unknown method " + methodName + " called on " + + connectionProtocolName + " protocol."; + LOG.warn(msg); + throw new RpcNoSuchMethodException(msg); + } + Message prototype = service.getRequestPrototype(methodDescriptor); + Message param = request.getValue(prototype); + + Message result; + Call currentCall = Server.getCurCall().get(); + try { + server.rpcDetailedMetrics.init(protocolImpl.protocolClass); + CURRENT_CALL_INFO.set(new CallInfo(server, methodName)); + currentCall.setDetailedMetricsName(methodName); + result = service.callBlockingMethod(methodDescriptor, null, param); + // Check if this needs to be a deferred response, + // by checking the ThreadLocal callback being set + if (CURRENT_CALLBACK.get() != null) { + currentCall.deferResponse(); + CURRENT_CALLBACK.set(null); + return null; + } + } catch (ServiceException e) { + Exception exception = (Exception) e.getCause(); + currentCall.setDetailedMetricsName( + exception.getClass().getSimpleName()); + throw (Exception) e.getCause(); + } catch (Exception e) { + currentCall.setDetailedMetricsName(e.getClass().getSimpleName()); + throw e; + } finally { + CURRENT_CALL_INFO.set(null); + } + return RpcWritable.wrap(result); + } + } + } + + // htrace in the ipc layer creates the span name based on toString() + // which uses the rpc header. 
in the normal case we want to defer decoding + // the rpc header until needed by the rpc engine. + static class RpcProtobufRequest extends RpcWritable.Buffer { + private volatile RequestHeaderProto requestHeader; + private Message payload; + + RpcProtobufRequest() { + } + + RpcProtobufRequest(RequestHeaderProto header, Message payload) { + this.requestHeader = header; + this.payload = payload; + } + + RequestHeaderProto getRequestHeader() throws IOException { + if (getByteBuffer() != null && requestHeader == null) { + requestHeader = getValue(RequestHeaderProto.getDefaultInstance()); + } + return requestHeader; + } + + @Override + public void writeTo(ResponseBuffer out) throws IOException { + requestHeader.writeDelimitedTo(out); + if (payload != null) { + payload.writeDelimitedTo(out); + } + } + + // this is used by htrace to name the span. + @Override + public String toString() { + try { + RequestHeaderProto header = getRequestHeader(); + return header.getDeclaringClassProtocolName() + "." + + header.getMethodName(); + } catch (IOException e) { + throw new IllegalArgumentException(e); + } + } + } +} diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngineCallback.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngineCallback.java index 50b70ca4bec..f85adb17d3f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngineCallback.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngineCallback.java @@ -18,12 +18,17 @@ package org.apache.hadoop.ipc; -import org.apache.hadoop.thirdparty.protobuf.Message; +import com.google.protobuf.Message; +/** + * This engine uses Protobuf 2.5.0. Recommended to upgrade to Protobuf 3.x + * from hadoop-thirdparty and use ProtobufRpcEngineCallback2. 
+ */ +@Deprecated public interface ProtobufRpcEngineCallback { - public void setResponse(Message message); + void setResponse(Message message); - public void error(Throwable t); + void error(Throwable t); } \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngineCallback2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngineCallback2.java new file mode 100644 index 00000000000..e8c09f56282 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngineCallback2.java @@ -0,0 +1,29 @@ +/** + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +package org.apache.hadoop.ipc; + +import org.apache.hadoop.thirdparty.protobuf.Message; + +public interface ProtobufRpcEngineCallback2 { + + public void setResponse(Message message); + + public void error(Throwable t); + +} \ No newline at end of file diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java index 4f95863b03d..e794cb913c2 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java @@ -1043,7 +1043,7 @@ public class RPC { private void initProtocolMetaInfo(Configuration conf) { RPC.setProtocolEngine(conf, ProtocolMetaInfoPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); ProtocolMetaInfoServerSideTranslatorPB xlator = new ProtocolMetaInfoServerSideTranslatorPB(this); BlockingService protocolInfoBlockingService = ProtocolInfoService @@ -1067,7 +1067,7 @@ public class RPC { @Override public Writable call(RPC.RpcKind rpcKind, String protocol, Writable rpcRequest, long receiveTime) throws Exception { - return getRpcInvoker(rpcKind).call(this, protocol, rpcRequest, + return getServerRpcInvoker(rpcKind).call(this, protocol, rpcRequest, receiveTime); } } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java index 84ecba1d34e..0ce78e54a43 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java @@ -114,7 +114,7 @@ public class RpcClientUtil { if (versionMap == null) { Configuration conf = new Configuration(); RPC.setProtocolEngine(conf, ProtocolMetaInfoPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); 
ProtocolMetaInfoPB protocolInfoProxy = getProtocolMetaInfoProxy(rpcProxy, conf); GetProtocolSignatureRequestProto.Builder builder = diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java index 6604bd0cc1c..f5f0d071f39 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcWritable.java @@ -42,6 +42,8 @@ public abstract class RpcWritable implements Writable { return (RpcWritable)o; } else if (o instanceof Message) { return new ProtobufWrapper((Message)o); + } else if (o instanceof com.google.protobuf.Message) { + return new ProtobufWrapperLegacy((com.google.protobuf.Message) o); } else if (o instanceof Writable) { return new WritableWrapper((Writable)o); } @@ -132,6 +134,49 @@ public abstract class RpcWritable implements Writable { } } + // adapter for Protobufs. + static class ProtobufWrapperLegacy extends RpcWritable { + private com.google.protobuf.Message message; + + ProtobufWrapperLegacy(com.google.protobuf.Message message) { + this.message = message; + } + + com.google.protobuf.Message getMessage() { + return message; + } + + @Override + void writeTo(ResponseBuffer out) throws IOException { + int length = message.getSerializedSize(); + length += com.google.protobuf.CodedOutputStream. + computeUInt32SizeNoTag(length); + out.ensureCapacity(length); + message.writeDelimitedTo(out); + } + + @SuppressWarnings("unchecked") + @Override + T readFrom(ByteBuffer bb) throws IOException { + // using the parser with a byte[]-backed coded input stream is the + // most efficient way to deserialize a protobuf. it has a direct + // path to the PB ctor that doesn't create multi-layered streams + // that internally buffer. 
+ com.google.protobuf.CodedInputStream cis = + com.google.protobuf.CodedInputStream.newInstance( + bb.array(), bb.position() + bb.arrayOffset(), bb.remaining()); + try { + cis.pushLimit(cis.readRawVarint32()); + message = message.getParserForType().parseFrom(cis); + cis.checkLastTagWas(0); + } finally { + // advance over the bytes read. + bb.position(bb.position() + cis.getTotalBytesRead()); + } + return (T)message; + } + } + /** * adapter to allow decoding of writables and protobufs from a byte buffer. */ diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java index 4448164f4b1..907d55f9be3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java @@ -304,7 +304,11 @@ public abstract class Server { RpcKindMapValue val = rpcKindMap.get(ProtoUtil.convert(rpcKind)); return (val == null) ? null : val.rpcRequestWrapperClass; } - + + protected RpcInvoker getServerRpcInvoker(RPC.RpcKind rpcKind) { + return getRpcInvoker(rpcKind); + } + public static RpcInvoker getRpcInvoker(RPC.RpcKind rpcKind) { RpcKindMapValue val = rpcKindMap.get(rpcKind); return (val == null) ? null : val.rpcInvoker; @@ -2688,15 +2692,15 @@ public abstract class Server { call.setPriorityLevel(callQueue.getPriorityLevel(call)); call.markCallCoordinated(false); if(alignmentContext != null && call.rpcRequest != null && - (call.rpcRequest instanceof ProtobufRpcEngine.RpcProtobufRequest)) { + (call.rpcRequest instanceof ProtobufRpcEngine2.RpcProtobufRequest)) { // if call.rpcRequest is not RpcProtobufRequest, will skip the following // step and treat the call as uncoordinated. As currently only certain // ClientProtocol methods request made through RPC protobuf needs to be // coordinated. 
String methodName; String protoName; - ProtobufRpcEngine.RpcProtobufRequest req = - (ProtobufRpcEngine.RpcProtobufRequest) call.rpcRequest; + ProtobufRpcEngine2.RpcProtobufRequest req = + (ProtobufRpcEngine2.RpcProtobufRequest) call.rpcRequest; try { methodName = req.getRequestHeader().getMethodName(); protoName = req.getRequestHeader().getDeclaringClassProtocolName(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java index 766fb0a6557..130414c2895 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/tracing/TraceAdmin.java @@ -28,7 +28,7 @@ import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; @@ -179,7 +179,7 @@ public class TraceAdmin extends Configured implements Tool { servicePrincipal); } RPC.setProtocolEngine(getConf(), TraceAdminProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); InetSocketAddress address = NetUtils.createSocketAddr(hostPort); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); Class xface = TraceAdminProtocolPB.class; diff --git a/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine2.proto b/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine2.proto new file mode 100644 index 00000000000..16ee880e7b7 --- /dev/null +++ b/hadoop-common-project/hadoop-common/src/main/proto/ProtobufRpcEngine2.proto @@ -0,0 +1,67 @@ +/** + * Licensed to the 
Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * These .proto interfaces are private and stable. + * Please see http://wiki.apache.org/hadoop/Compatibility + * for what changes are allowed for a *stable* .proto interface. + */ +syntax = "proto2"; +/** + * These are the messages used by Hadoop RPC for the Rpc Engine Protocol Buffer + * to marshal the request and response in the RPC layer. + * The messages are sent in addition to the normal RPC header as + * defined in RpcHeader.proto + */ +option java_package = "org.apache.hadoop.ipc.protobuf"; +option java_outer_classname = "ProtobufRpcEngine2Protos"; +option java_generate_equals_and_hash = true; +package hadoop.common; + +/** + * This message is the header for the Protobuf Rpc Engine + * when sending a RPC request from RPC client to the RPC server. + * The actual request (serialized as protobuf) follows this request. + * + * No special header is needed for the Rpc Response for Protobuf Rpc Engine. + * The normal RPC response header (see RpcHeader.proto) are sufficient. 
+ */ +message RequestHeaderProto { + /** Name of the RPC method */ + required string methodName = 1; + + /** + * RPCs for a particular interface (ie protocol) are done using a + * IPC connection that is setup using rpcProxy. + * The rpcProxy's has a declared protocol name that is + * sent form client to server at connection time. + * + * Each Rpc call also sends a protocol name + * (called declaringClassprotocolName). This name is usually the same + * as the connection protocol name except in some cases. + * For example metaProtocols such ProtocolInfoProto which get metainfo + * about the protocol reuse the connection but need to indicate that + * the actual protocol is different (i.e. the protocol is + * ProtocolInfoProto) since they reuse the connection; in this case + * the declaringClassProtocolName field is set to the ProtocolInfoProto + */ + required string declaringClassProtocolName = 2; + + /** protocol version of class declaring the called method */ + required uint64 clientProtocolVersion = 3; +} diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java index 6505fbb8224..0c2530739fa 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ha/DummyHAService.java @@ -28,7 +28,7 @@ import org.apache.hadoop.ha.HAServiceProtocol.HAServiceState; import org.apache.hadoop.ha.protocolPB.HAServiceProtocolPB; import org.apache.hadoop.ha.protocolPB.HAServiceProtocolServerSideTranslatorPB; import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.HAServiceProtocolService; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; @@ -119,7 +119,7 @@ class 
DummyHAService extends HAServiceTarget { try { RPC.setProtocolEngine(conf, - HAServiceProtocolPB.class, ProtobufRpcEngine.class); + HAServiceProtocolPB.class, ProtobufRpcEngine2.class); HAServiceProtocolServerSideTranslatorPB haServiceProtocolXlator = new HAServiceProtocolServerSideTranslatorPB(new MockHAProtocolImpl()); BlockingService haPbService = HAServiceProtocolService diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java index bbb4ec21812..e7130d4da8c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/RPCCallBenchmark.java @@ -66,7 +66,7 @@ public class RPCCallBenchmark extends TestRpcBase implements Tool { public int secondsToRun = 15; private int msgSize = 1024; public Class rpcEngine = - ProtobufRpcEngine.class; + ProtobufRpcEngine2.class; private MyOptions(String args[]) { try { @@ -181,7 +181,7 @@ public class RPCCallBenchmark extends TestRpcBase implements Tool { if (line.hasOption('e')) { String eng = line.getOptionValue('e'); if ("protobuf".equals(eng)) { - rpcEngine = ProtobufRpcEngine.class; + rpcEngine = ProtobufRpcEngine2.class; } else { throw new ParseException("invalid engine: " + eng); } @@ -224,7 +224,7 @@ public class RPCCallBenchmark extends TestRpcBase implements Tool { RPC.Server server; // Get RPC server for server side implementation - if (opts.rpcEngine == ProtobufRpcEngine.class) { + if (opts.rpcEngine == ProtobufRpcEngine2.class) { // Create server side implementation PBServerImpl serverImpl = new PBServerImpl(); BlockingService service = TestProtobufRpcProto @@ -378,7 +378,7 @@ public class RPCCallBenchmark extends TestRpcBase implements Tool { private RpcServiceWrapper createRpcClient(MyOptions opts) throws IOException { InetSocketAddress addr = 
NetUtils.createSocketAddr(opts.host, opts.getPort()); - if (opts.rpcEngine == ProtobufRpcEngine.class) { + if (opts.rpcEngine == ProtobufRpcEngine2.class) { final TestRpcService proxy = RPC.getProxy(TestRpcService.class, 0, addr, conf); return new RpcServiceWrapper() { @Override diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java index 10e23baefef..c1b08586976 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestMultipleProtocolServer.java @@ -45,7 +45,7 @@ public class TestMultipleProtocolServer extends TestRpcBase { // Set RPC engine to protobuf RPC engine Configuration conf2 = new Configuration(); RPC.setProtocolEngine(conf2, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); TestRpcService client = RPC.getProxy(TestRpcService.class, 0, addr, conf2); TestProtoBufRpc.testProtoBufRpc(client); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java index dfb9e934f60..d813c6b784f 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRPCCompatibility.java @@ -25,8 +25,6 @@ import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeys; -import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto; -import org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto; import org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto; 
import org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto; import org.apache.hadoop.ipc.protobuf.TestProtos.OptRequestProto; @@ -138,7 +136,7 @@ public class TestProtoBufRPCCompatibility { conf = new Configuration(); conf.setInt(CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH, 1024); // Set RPC engine to protobuf RPC engine - RPC.setProtocolEngine(conf, NewRpcService.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, NewRpcService.class, ProtobufRpcEngine2.class); // Create server side implementation NewServerImpl serverImpl = new NewServerImpl(); @@ -151,7 +149,7 @@ public class TestProtoBufRPCCompatibility { server.start(); - RPC.setProtocolEngine(conf, OldRpcService.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, OldRpcService.class, ProtobufRpcEngine2.class); OldRpcService proxy = RPC.getProxy(OldRpcService.class, 0, addr, conf); // Verify that exception is thrown if protocolVersion is mismatch between @@ -168,7 +166,8 @@ public class TestProtoBufRPCCompatibility { } // Verify that missing of optional field is still compatible in RPC call. 
- RPC.setProtocolEngine(conf, NewerRpcService.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, NewerRpcService.class, + ProtobufRpcEngine2.class); NewerRpcService newProxy = RPC.getProxy(NewerRpcService.class, 0, addr, conf); newProxy.echo(null, emptyRequest); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java index facb8fdd8b1..06c36463104 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java @@ -94,8 +94,9 @@ public class TestProtoBufRpc extends TestRpcBase { conf.setInt(CommonConfigurationKeys.IPC_MAXIMUM_DATA_LENGTH, 1024); conf.setBoolean(CommonConfigurationKeys.IPC_SERVER_LOG_SLOW_RPC, true); // Set RPC engine to protobuf RPC engine - RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine.class); - RPC.setProtocolEngine(conf, TestRpcService2.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine2.class); + RPC.setProtocolEngine(conf, TestRpcService2.class, + ProtobufRpcEngine2.class); // Create server side implementation PBServerImpl serverImpl = new PBServerImpl(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java index 32300d4f876..922e9192c41 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpcServerHandoff.java @@ -52,7 +52,7 @@ public class TestProtoBufRpcServerHandoff { TestProtobufRpcHandoffProto.newReflectiveBlockingService(serverImpl); 
RPC.setProtocolEngine(conf, TestProtoBufRpcServerHandoffProtocol.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); RPC.Server server = new RPC.Builder(conf) .setProtocol(TestProtoBufRpcServerHandoffProtocol.class) .setInstance(blockingService) @@ -144,8 +144,8 @@ public class TestProtoBufRpcServerHandoff { TestProtos.SleepRequestProto2 request) throws ServiceException { final long startTime = System.currentTimeMillis(); - final ProtobufRpcEngineCallback callback = - ProtobufRpcEngine.Server.registerForDeferredResponse(); + final ProtobufRpcEngineCallback2 callback = + ProtobufRpcEngine2.Server.registerForDeferredResponse(); final long sleepTime = request.getSleepTime(); new Thread() { @Override diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java index ffee086fa98..22fdcbbe14e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCCompatibility.java @@ -114,19 +114,19 @@ public class TestRPCCompatibility { ProtocolSignature.resetCache(); RPC.setProtocolEngine(conf, - TestProtocol0.class, ProtobufRpcEngine.class); + TestProtocol0.class, ProtobufRpcEngine2.class); RPC.setProtocolEngine(conf, - TestProtocol1.class, ProtobufRpcEngine.class); + TestProtocol1.class, ProtobufRpcEngine2.class); RPC.setProtocolEngine(conf, - TestProtocol2.class, ProtobufRpcEngine.class); + TestProtocol2.class, ProtobufRpcEngine2.class); RPC.setProtocolEngine(conf, - TestProtocol3.class, ProtobufRpcEngine.class); + TestProtocol3.class, ProtobufRpcEngine2.class); RPC.setProtocolEngine(conf, - TestProtocol4.class, ProtobufRpcEngine.class); + TestProtocol4.class, ProtobufRpcEngine2.class); } @After diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCWaitForProxy.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCWaitForProxy.java index d810fe3c5a1..90973d2674c 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCWaitForProxy.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRPCWaitForProxy.java @@ -44,7 +44,7 @@ public class TestRPCWaitForProxy extends TestRpcBase { @Before public void setupProtocolEngine() { RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); } /** diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java index 2729dc3cd9d..65558a7980a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestReuseRpcConnections.java @@ -26,7 +26,6 @@ import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.TestConnectionRetryPolicy; import org.apache.hadoop.ipc.Client.ConnectionId; -import org.apache.hadoop.ipc.TestRpcBase.TestRpcService; import org.junit.Before; import org.junit.Test; @@ -129,7 +128,7 @@ public class TestReuseRpcConnections extends TestRpcBase { try { proxy1 = getClient(addr, newConf, retryPolicy1); proxy1.ping(null, newEmptyRequest()); - client = ProtobufRpcEngine.getClient(newConf); + client = ProtobufRpcEngine2.getClient(newConf); final Set conns = client.getConnectionIds(); assertEquals("number of connections in cache is wrong", 1, conns.size()); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java 
b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java index bf24d680dde..010935b6096 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestRpcBase.java @@ -70,7 +70,7 @@ public class TestRpcBase { protected void setupConf() { conf = new Configuration(); // Set RPC engine to protobuf RPC engine - RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java index 72f73822b6f..5f944574656 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestSaslRPC.java @@ -169,7 +169,7 @@ public class TestSaslRPC extends TestRpcBase { clientFallBackToSimpleAllowed = true; // Set RPC engine to protobuf RPC engine - RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine2.class); } static String getQOPNames (QualityOfProtection[] qops){ @@ -356,7 +356,7 @@ public class TestSaslRPC extends TestRpcBase { newConf.setInt(CommonConfigurationKeysPublic.IPC_CLIENT_CONNECTION_MAXIDLETIME_KEY, timeouts[0]); proxy1 = getClient(addr, newConf); proxy1.getAuthMethod(null, newEmptyRequest()); - client = ProtobufRpcEngine.getClient(newConf); + client = ProtobufRpcEngine2.getClient(newConf); Set conns = client.getConnectionIds(); assertEquals("number of connections in cache is wrong", 1, conns.size()); // same conf, connection should be re-used diff --git 
a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java index c86b9ae3441..edd537011c4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/TestDoAsEffectiveUser.java @@ -21,7 +21,7 @@ import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; import org.apache.hadoop.io.Text; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.TestRpcBase; @@ -151,7 +151,7 @@ public class TestDoAsEffectiveUser extends TestRpcBase { configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME); // Set RPC engine to protobuf RPC engine RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); final Server server = setupTestServer(conf, 5); @@ -181,7 +181,7 @@ public class TestDoAsEffectiveUser extends TestRpcBase { getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1"); RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); final Server server = setupTestServer(conf, 5); @@ -215,7 +215,7 @@ public class TestDoAsEffectiveUser extends TestRpcBase { getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1"); RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); final Server server = setupTestServer(conf, 5); @@ -251,7 +251,7 @@ public class 
TestDoAsEffectiveUser extends TestRpcBase { conf.setStrings(DefaultImpersonationProvider.getTestProvider(). getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group1"); RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); final Server server = setupTestServer(conf, 2); @@ -286,7 +286,7 @@ public class TestDoAsEffectiveUser extends TestRpcBase { final Configuration conf = new Configuration(); configureSuperUserIPAddresses(conf, REAL_USER_SHORT_NAME); RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); final Server server = setupTestServer(conf, 2); @@ -322,7 +322,7 @@ public class TestDoAsEffectiveUser extends TestRpcBase { getProxySuperuserGroupConfKey(REAL_USER_SHORT_NAME), "group3"); RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); final Server server = setupTestServer(conf, 2); @@ -363,7 +363,7 @@ public class TestDoAsEffectiveUser extends TestRpcBase { TestTokenSecretManager sm = new TestTokenSecretManager(); SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, conf); RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); final Server server = setupTestServer(conf, 5, sm); @@ -411,7 +411,7 @@ public class TestDoAsEffectiveUser extends TestRpcBase { SecurityUtil.setAuthenticationMethod(AuthenticationMethod.KERBEROS, newConf); // Set RPC engine to protobuf RPC engine RPC.setProtocolEngine(newConf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(newConf); final Server server = setupTestServer(newConf, 5, sm); diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/NameNodeProxiesClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/NameNodeProxiesClient.java index c640b39b6f4..68577aad825 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/NameNodeProxiesClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/NameNodeProxiesClient.java @@ -56,7 +56,7 @@ import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.RetryProxy; import org.apache.hadoop.io.retry.RetryUtils; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.SecurityUtil; @@ -355,7 +355,7 @@ public class NameNodeProxiesClient { AlignmentContext alignmentContext) throws IOException { RPC.setProtocolEngine(conf, ClientNamenodeProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); final RetryPolicy defaultPolicy = RetryUtils.getDefaultRetryPolicy( diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java index 4028b0e8fb2..47234e8b65d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java @@ -68,7 +68,7 @@ import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus; import 
org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; @@ -181,7 +181,7 @@ public class ClientDatanodeProtocolTranslatorPB implements InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, SocketFactory factory, int socketTimeout) throws IOException { RPC.setProtocolEngine(conf, ClientDatanodeProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); return RPC.getProxy(ClientDatanodeProtocolPB.class, RPC.getProtocolVersion(ClientDatanodeProtocolPB.class), addr, ticket, conf, factory, socketTimeout); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java index 572c65b67b2..f6a3f5db0b7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolTranslatorPB.java @@ -237,7 +237,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.io.retry.AsyncCallHandler; import org.apache.hadoop.ipc.Client; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; @@ -456,7 +456,7 @@ public class ClientNamenodeProtocolTranslatorPB implements private void setAsyncReturnValue() { final AsyncGet asyncReturnMessage - = 
ProtobufRpcEngine.getAsyncReturnMessage(); + = ProtobufRpcEngine2.getAsyncReturnMessage(); final AsyncGet asyncGet = new AsyncGet() { @Override @@ -1569,7 +1569,7 @@ public class ClientNamenodeProtocolTranslatorPB implements if (Client.isAsynchronousMode()) { rpcProxy.getAclStatus(null, req); final AsyncGet asyncReturnMessage - = ProtobufRpcEngine.getAsyncReturnMessage(); + = ProtobufRpcEngine2.getAsyncReturnMessage(); final AsyncGet asyncGet = new AsyncGet() { @Override diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java index 5165887ece5..ce8a89b84ac 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/protocolPB/ReconfigurationProtocolTranslatorPB.java @@ -34,7 +34,7 @@ import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListR import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesResponseProto; import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.StartReconfigurationRequestProto; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.ProtocolTranslator; import org.apache.hadoop.ipc.RPC; @@ -84,7 +84,7 @@ public class ReconfigurationProtocolTranslatorPB implements InetSocketAddress addr, UserGroupInformation ticket, Configuration conf, SocketFactory factory, int socketTimeout) throws IOException { RPC.setProtocolEngine(conf, ReconfigurationProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); return 
RPC.getProxy(ReconfigurationProtocolPB.class, RPC.getProtocolVersion(ReconfigurationProtocolPB.class), addr, ticket, conf, factory, socketTimeout); diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionPool.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionPool.java index b84848089a3..60924990cfd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionPool.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/ConnectionPool.java @@ -47,7 +47,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocol; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.RetryUtils; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.RefreshUserMappingsProtocol; @@ -379,7 +379,7 @@ public class ConnectionPool { throw new IllegalStateException(msg); } ProtoImpl classes = PROTO_MAP.get(proto); - RPC.setProtocolEngine(conf, classes.protoPb, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, classes.protoPb, ProtobufRpcEngine2.class); final RetryPolicy defaultPolicy = RetryUtils.getDefaultRetryPolicy(conf, HdfsClientConfigKeys.Retry.POLICY_ENABLED_KEY, diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterAdminServer.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterAdminServer.java index 5fd7c79f88f..1df0844a338 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterAdminServer.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterAdminServer.java @@ -75,7 +75,7 @@ import org.apache.hadoop.hdfs.server.federation.store.protocol.UpdateMountTableE import org.apache.hadoop.hdfs.server.federation.store.protocol.UpdateMountTableEntryResponse; import org.apache.hadoop.hdfs.server.federation.store.records.MountTable; import org.apache.hadoop.hdfs.server.namenode.NameNode; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC.Server; import org.apache.hadoop.ipc.RefreshRegistry; @@ -136,7 +136,7 @@ public class RouterAdminServer extends AbstractService RBFConfigKeys.DFS_ROUTER_ADMIN_HANDLER_COUNT_DEFAULT); RPC.setProtocolEngine(this.conf, RouterAdminProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); RouterAdminProtocolServerSideTranslatorPB routerAdminProtocolTranslator = new RouterAdminProtocolServerSideTranslatorPB(this); diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterClient.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterClient.java index 0641c0b82af..ee29b7dd2b5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterClient.java @@ -28,7 +28,7 @@ import org.apache.hadoop.hdfs.protocolPB.RouterAdminProtocolPB; import org.apache.hadoop.hdfs.protocolPB.RouterAdminProtocolTranslatorPB; import org.apache.hadoop.hdfs.server.federation.resolver.MountTableManager; import org.apache.hadoop.hdfs.server.federation.resolver.RouterGenericManager; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import 
org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; @@ -47,7 +47,7 @@ public class RouterClient implements Closeable { throws IOException { RPC.setProtocolEngine( - conf, RouterAdminProtocolPB.class, ProtobufRpcEngine.class); + conf, RouterAdminProtocolPB.class, ProtobufRpcEngine2.class); AtomicBoolean fallbackToSimpleAuth = new AtomicBoolean(false); final long version = RPC.getProtocolVersion(RouterAdminProtocolPB.class); diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java index 345ec705f2c..4f1310bb259 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/server/federation/router/RouterRpcServer.java @@ -133,7 +133,7 @@ import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.Text; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC.Server; import org.apache.hadoop.ipc.RemoteException; @@ -256,7 +256,7 @@ public class RouterRpcServer extends AbstractService implements ClientProtocol, readerQueueSize); RPC.setProtocolEngine(this.conf, ClientNamenodeProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); ClientNamenodeProtocolServerSideTranslatorPB clientProtocolServerTranslator = diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/tools/federation/RouterAdmin.java 
b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/tools/federation/RouterAdmin.java index 5ea33237b67..7422989d6aa 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/tools/federation/RouterAdmin.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/main/java/org/apache/hadoop/hdfs/tools/federation/RouterAdmin.java @@ -71,7 +71,7 @@ import org.apache.hadoop.hdfs.server.federation.store.protocol.RemoveMountTableE import org.apache.hadoop.hdfs.server.federation.store.protocol.UpdateMountTableEntryRequest; import org.apache.hadoop.hdfs.server.federation.store.protocol.UpdateMountTableEntryResponse; import org.apache.hadoop.hdfs.server.federation.store.records.MountTable; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RefreshResponse; import org.apache.hadoop.ipc.RemoteException; @@ -1222,7 +1222,7 @@ public class RouterAdmin extends Configured implements Tool { InetSocketAddress address = NetUtils.createSocketAddr(hostport); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); - RPC.setProtocolEngine(conf, xface, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, xface, ProtobufRpcEngine2.class); GenericRefreshProtocolPB proxy = (GenericRefreshProtocolPB)RPC.getProxy( xface, RPC.getProtocolVersion(xface), address, ugi, conf, NetUtils.getDefaultSocketFactory(conf), 0); diff --git a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/MockNamenode.java b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/MockNamenode.java index 7c80ad64bc6..f9080653841 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/MockNamenode.java +++ b/hadoop-hdfs-project/hadoop-hdfs-rbf/src/test/java/org/apache/hadoop/hdfs/server/federation/MockNamenode.java @@ -90,7 +90,7 @@ 
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.StorageReport; import org.apache.hadoop.http.HttpServer2; import org.apache.hadoop.io.Text; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC.Server; import org.apache.hadoop.ipc.RemoteException; @@ -174,7 +174,7 @@ public class MockNamenode { */ private void setupRPCServer(final Configuration conf) throws IOException { RPC.setProtocolEngine( - conf, ClientNamenodeProtocolPB.class, ProtobufRpcEngine.class); + conf, ClientNamenodeProtocolPB.class, ProtobufRpcEngine2.class); ClientNamenodeProtocolServerSideTranslatorPB clientNNProtoXlator = new ClientNamenodeProtocolServerSideTranslatorPB(mockNn); diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml index 4de50c27398..7f1b3bc1423 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml +++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml @@ -349,6 +349,9 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd"> replace-sources false + + **/DFSUtil.java +
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java index c5ba8b96cf3..0facfd967d1 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/DFSUtil.java @@ -72,6 +72,7 @@ import org.apache.hadoop.fs.ParentNotDirectoryException; import org.apache.hadoop.fs.UnresolvedLinkException; import org.apache.hadoop.hdfs.server.namenode.FSDirectory; import org.apache.hadoop.hdfs.server.namenode.INodesInPath; +import org.apache.hadoop.ipc.ProtobufRpcEngine; import org.apache.hadoop.security.AccessControlException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -92,7 +93,7 @@ import org.apache.hadoop.hdfs.server.namenode.NameNode; import org.apache.hadoop.hdfs.web.AuthFilterInitializer; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.http.HttpServer2; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.AuthenticationFilterInitializer; @@ -1295,6 +1296,27 @@ public class DFSUtil { */ public static void addPBProtocol(Configuration conf, Class protocol, BlockingService service, RPC.Server server) throws IOException { + RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine2.class); + server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service); + } + + /** + * Add protobuf based protocol to the {@link RPC.Server}. + * This engine uses Protobuf 2.5.0. Recommended to upgrade to + * Protobuf 3.x from hadoop-thirdparty and use + * {@link DFSUtil#addPBProtocol(Configuration, Class, BlockingService, + * RPC.Server)}. 
+ * @param conf configuration + * @param protocol Protocol interface + * @param service service that implements the protocol + * @param server RPC server to which the protocol and implementation are + * added + * @throws IOException + */ + @Deprecated + public static void addPBProtocol(Configuration conf, Class protocol, + com.google.protobuf.BlockingService service, RPC.Server server) + throws IOException { RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine.class); server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java index 3063083db88..2a56ef3e186 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/NameNodeProxies.java @@ -48,7 +48,7 @@ import org.apache.hadoop.io.retry.RetryPolicies; import org.apache.hadoop.io.retry.RetryPolicy; import org.apache.hadoop.io.retry.RetryProxy; import org.apache.hadoop.ipc.AlignmentContext; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProxyCombiner; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RefreshCallQueueProtocol; @@ -305,7 +305,7 @@ public class NameNodeProxies { private static T createNameNodeProxy(InetSocketAddress address, Configuration conf, UserGroupInformation ugi, Class xface, int rpcTimeout, AlignmentContext alignmentContext) throws IOException { - RPC.setProtocolEngine(conf, xface, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, xface, ProtobufRpcEngine2.class); return RPC.getProtocolProxy(xface, RPC.getProtocolVersion(xface), address, ugi, conf, NetUtils.getDefaultSocketFactory(conf), rpcTimeout, null, null, diff --git 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java index 050073fb952..220e9e28356 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeLifelineProtocolClientSideTranslatorPB.java @@ -30,7 +30,7 @@ import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration; import org.apache.hadoop.hdfs.server.protocol.StorageReport; import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; @@ -57,7 +57,7 @@ public class DatanodeLifelineProtocolClientSideTranslatorPB implements public DatanodeLifelineProtocolClientSideTranslatorPB( InetSocketAddress nameNodeAddr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, DatanodeLifelineProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); rpcProxy = createNamenode(nameNodeAddr, conf, ugi); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java index 6ab98e5880c..b512d7e21d5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/DatanodeProtocolClientSideTranslatorPB.java @@ -62,7 +62,7 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks; import org.apache.hadoop.hdfs.server.protocol.StorageReport; import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; @@ -99,7 +99,7 @@ public class DatanodeProtocolClientSideTranslatorPB implements public DatanodeProtocolClientSideTranslatorPB(InetSocketAddress nameNodeAddr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, DatanodeProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); rpcProxy = createNamenode(nameNodeAddr, conf, ugi); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java index 64d57562a18..031b0e4512a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/InterDatanodeProtocolTranslatorPB.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlo import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol; import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import 
org.apache.hadoop.ipc.ProtocolMetaInterface; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RpcClientUtil; @@ -62,7 +62,7 @@ public class InterDatanodeProtocolTranslatorPB implements int socketTimeout) throws IOException { RPC.setProtocolEngine(conf, InterDatanodeProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); rpcProxy = RPC.getProxy(InterDatanodeProtocolPB.class, RPC.getProtocolVersion(InterDatanodeProtocolPB.class), addr, ugi, conf, factory, socketTimeout); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java index 3b3f89e7d9b..01c115e7d32 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/client/IPCLoggerChannel.java @@ -52,7 +52,7 @@ import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.util.StopWatch; @@ -235,13 +235,13 @@ public class IPCLoggerChannel implements AsyncLogger { true); RPC.setProtocolEngine(confCopy, - QJournalProtocolPB.class, ProtobufRpcEngine.class); + QJournalProtocolPB.class, ProtobufRpcEngine2.class); return SecurityUtil.doAsLoginUser( new PrivilegedExceptionAction() { @Override public QJournalProtocol run() throws IOException { RPC.setProtocolEngine(confCopy, - QJournalProtocolPB.class, ProtobufRpcEngine.class); + QJournalProtocolPB.class, ProtobufRpcEngine2.class); 
QJournalProtocolPB pbproxy = RPC.getProxy( QJournalProtocolPB.class, RPC.getProtocolVersion(QJournalProtocolPB.class), diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java index 36f7faaedb0..ef44f21d2cf 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeRpcServer.java @@ -46,7 +46,7 @@ import org.apache.hadoop.hdfs.qjournal.protocolPB.QJournalProtocolServerSideTran import org.apache.hadoop.hdfs.server.common.StorageInfo; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC.Server; import org.apache.hadoop.net.NetUtils; @@ -85,7 +85,7 @@ public class JournalNodeRpcServer implements QJournalProtocol, LOG.info("RPC server is binding to " + bindHost + ":" + addr.getPort()); RPC.setProtocolEngine(confCopy, QJournalProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); QJournalProtocolServerSideTranslatorPB translator = new QJournalProtocolServerSideTranslatorPB(this); BlockingService service = QJournalProtocolService diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeSyncer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeSyncer.java index dc352c5d367..bb141d84374 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeSyncer.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/qjournal/server/JournalNodeSyncer.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hdfs.server.common.Util; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog; import org.apache.hadoop.hdfs.util.DataTransferThrottler; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; @@ -505,7 +505,7 @@ public class JournalNodeSyncer { @Override public InterQJournalProtocol run() throws IOException { RPC.setProtocolEngine(confCopy, InterQJournalProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); InterQJournalProtocolPB interQJournalProtocolPB = RPC.getProxy( InterQJournalProtocolPB.class, RPC.getProtocolVersion(InterQJournalProtocolPB.class), diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java index f6ba4239d71..2ba22b1a90b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/aliasmap/InMemoryLevelDBAliasMapServer.java @@ -19,7 +19,7 @@ package org.apache.hadoop.hdfs.server.aliasmap; import org.apache.hadoop.thirdparty.protobuf.BlockingService; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configurable; @@ -71,7 +71,7 @@ 
public class InMemoryLevelDBAliasMapServer implements InMemoryAliasMapProtocol, public void start() throws IOException { RPC.setProtocolEngine(getConf(), AliasMapProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); AliasMapProtocolServerSideTranslatorPB aliasMapProtocolXlator = new AliasMapProtocolServerSideTranslatorPB(this); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java index d390c1e5423..380343ded5e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/DataNode.java @@ -188,7 +188,7 @@ import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.io.IOUtils; import org.apache.hadoop.io.ReadaheadPool; import org.apache.hadoop.io.nativeio.NativeIO; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem; import org.apache.hadoop.metrics2.util.MBeans; @@ -1015,7 +1015,7 @@ public class DataNode extends ReconfigurableBase // Add all the RPC protocols that the Datanode implements RPC.setProtocolEngine(getConf(), ClientDatanodeProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); ClientDatanodeProtocolServerSideTranslatorPB clientDatanodeProtocolXlator = new ClientDatanodeProtocolServerSideTranslatorPB(this); BlockingService service = ClientDatanodeProtocolService diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java index fcbd457d7a5..230e4020117 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/NameNodeRpcServer.java @@ -184,7 +184,7 @@ import org.apache.hadoop.hdfs.server.protocol.StorageReport; import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary; import org.apache.hadoop.io.EnumSetWritable; import org.apache.hadoop.io.Text; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RetriableException; import org.apache.hadoop.ipc.RetryCache; @@ -281,7 +281,7 @@ public class NameNodeRpcServer implements NamenodeProtocols { DFS_NAMENODE_HANDLER_COUNT_DEFAULT); RPC.setProtocolEngine(conf, ClientNamenodeProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); ClientNamenodeProtocolServerSideTranslatorPB clientProtocolServerTranslator = @@ -405,7 +405,7 @@ public class NameNodeRpcServer implements NamenodeProtocols { InetSocketAddress lifelineRpcAddr = nn.getLifelineRpcServerAddress(conf); if (lifelineRpcAddr != null) { RPC.setProtocolEngine(conf, HAServiceProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); String bindHost = nn.getLifelineRpcServerBindHost(conf); if (bindHost == null) { bindHost = lifelineRpcAddr.getHostName(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java index 04960e3c3e2..a167d116ff9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/DFSAdmin.java @@ -87,7 +87,7 @@ import org.apache.hadoop.hdfs.protocol.SnapshotException; import org.apache.hadoop.hdfs.server.namenode.NameNode; import 
org.apache.hadoop.hdfs.server.namenode.TransferFsImage; import org.apache.hadoop.io.MultipleIOException; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RefreshCallQueueProtocol; import org.apache.hadoop.ipc.RefreshResponse; @@ -2045,7 +2045,7 @@ public class DFSAdmin extends FsShell { InetSocketAddress address = NetUtils.createSocketAddr(hostport); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); - RPC.setProtocolEngine(conf, xface, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, xface, ProtobufRpcEngine2.class); GenericRefreshProtocolPB proxy = (GenericRefreshProtocolPB) RPC.getProxy(xface, RPC.getProtocolVersion(xface), address, ugi, conf, NetUtils.getDefaultSocketFactory(conf), 0); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java index 946358c7a61..f64e7006ea9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQJMWithFaults.java @@ -51,7 +51,7 @@ import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.hdfs.util.Holder; import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.test.GenericTestUtils; import org.apache.log4j.Level; import org.junit.Rule; @@ -225,7 +225,7 @@ public class TestQJMWithFaults { // If the user specifies a seed, then we should gather all the // IPC trace information so that debugging is easier. This makes // the test run about 25% slower otherwise. 
- GenericTestUtils.setLogLevel(ProtobufRpcEngine.LOG, Level.ALL); + GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.ALL); } else { seed = new Random().nextLong(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java index cd0216e2f26..b88c6433a2b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/client/TestQuorumJournalManager.java @@ -58,7 +58,7 @@ import org.apache.hadoop.hdfs.server.namenode.NNStorage; import org.apache.hadoop.hdfs.server.namenode.NameNodeLayoutVersion; import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo; import org.apache.hadoop.io.IOUtils; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.test.GenericTestUtils; import org.apache.log4j.Level; import org.junit.After; @@ -87,7 +87,7 @@ public class TestQuorumJournalManager { private final List toClose = Lists.newLinkedList(); static { - GenericTestUtils.setLogLevel(ProtobufRpcEngine.LOG, Level.ALL); + GenericTestUtils.setLogLevel(ProtobufRpcEngine2.LOG, Level.ALL); } @Rule diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java index 4c4c171d72a..c548b716f04 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/security/token/block/TestBlockToken.java @@ -74,7 +74,7 @@ import org.apache.hadoop.io.DataInputBuffer; import 
org.apache.hadoop.io.DataOutputBuffer; import org.apache.hadoop.io.TestWritable; import org.apache.hadoop.ipc.Client; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; @@ -314,7 +314,7 @@ public class TestBlockToken { .getReplicaVisibleLength(any(), any()); RPC.setProtocolEngine(conf, ClientDatanodeProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); BlockingService service = ClientDatanodeProtocolService .newReflectiveBlockingService(mockDN); return new RPC.Builder(conf).setProtocol(ClientDatanodeProtocolPB.class) diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java index d13cc38cb8a..d57a7344fe0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/SnapshotTestHelper.java @@ -37,7 +37,7 @@ import org.apache.hadoop.hdfs.server.namenode.*; import org.apache.hadoop.hdfs.server.namenode.top.metrics.TopMetrics; import org.apache.hadoop.http.HttpRequestLog; import org.apache.hadoop.http.HttpServer2; -import org.apache.hadoop.ipc.ProtobufRpcEngine.Server; +import org.apache.hadoop.ipc.ProtobufRpcEngine2.Server; import org.apache.hadoop.metrics2.impl.MetricsSystemImpl; import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.test.GenericTestUtils; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/HSClientProtocolPBClientImpl.java 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/HSClientProtocolPBClientImpl.java index 156930325bc..5ff80809450 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/HSClientProtocolPBClientImpl.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/HSClientProtocolPBClientImpl.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.mapreduce.v2.api.HSClientProtocol; import org.apache.hadoop.mapreduce.v2.api.HSClientProtocolPB; @@ -34,7 +34,7 @@ public class HSClientProtocolPBClientImpl extends MRClientProtocolPBClientImpl InetSocketAddress addr, Configuration conf) throws IOException { super(); RPC.setProtocolEngine(conf, HSClientProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = (HSClientProtocolPB)RPC.getProxy( HSClientProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java index efd48715283..7d8344841b8 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java +++ 
b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java @@ -24,7 +24,7 @@ import java.lang.reflect.UndeclaredThrowableException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol; @@ -108,8 +108,10 @@ public class MRClientProtocolPBClientImpl implements MRClientProtocol, public MRClientProtocolPBClientImpl() {}; - public MRClientProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { - RPC.setProtocolEngine(conf, MRClientProtocolPB.class, ProtobufRpcEngine.class); + public MRClientProtocolPBClientImpl(long clientVersion, + InetSocketAddress addr, Configuration conf) throws IOException { + RPC.setProtocolEngine(conf, MRClientProtocolPB.class, + ProtobufRpcEngine2.class); proxy = RPC.getProxy(MRClientProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HSProxies.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HSProxies.java index 3e238cbc0f6..49085232754 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HSProxies.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HSProxies.java @@ -23,7 +23,7 @@ import java.net.InetSocketAddress; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; -import 
org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocol; import org.apache.hadoop.mapreduce.v2.api.HSAdminRefreshProtocolPB; @@ -93,7 +93,7 @@ public class HSProxies { private static Object createHSProxy(InetSocketAddress address, Configuration conf, UserGroupInformation ugi, Class xface, int rpcTimeout) throws IOException { - RPC.setProtocolEngine(conf, xface, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, xface, ProtobufRpcEngine2.class); Object proxy = RPC.getProxy(xface, RPC.getProtocolVersion(xface), address, ugi, conf, NetUtils.getDefaultSocketFactory(conf), rpcTimeout); return proxy; diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/server/HSAdminServer.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/server/HSAdminServer.java index aa03bb6940c..85bd563c83f 100644 --- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/server/HSAdminServer.java +++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/server/HSAdminServer.java @@ -25,7 +25,7 @@ import java.security.PrivilegedExceptionAction; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig; import org.apache.hadoop.security.AccessControlException; @@ -81,7 +81,7 @@ public class HSAdminServer extends AbstractService implements HSAdminProtocol { 
@Override public void serviceInit(Configuration conf) throws Exception { RPC.setProtocolEngine(conf, RefreshUserMappingsProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); RefreshUserMappingsProtocolServerSideTranslatorPB refreshUserMappingXlator = new RefreshUserMappingsProtocolServerSideTranslatorPB( this); @@ -154,7 +154,7 @@ public class HSAdminServer extends AbstractService implements HSAdminProtocol { private void addProtocol(Configuration conf, Class protocol, BlockingService blockingService) throws IOException { - RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine2.class); clientRpcServer.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, blockingService); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/impl/pb/client/ClientAMProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/impl/pb/client/ClientAMProtocolPBClientImpl.java index 0c1de58902e..79d6773d0ec 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/impl/pb/client/ClientAMProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-services/hadoop-yarn-services-core/src/main/java/org/apache/hadoop/yarn/service/impl/pb/client/ClientAMProtocolPBClientImpl.java @@ -20,7 +20,7 @@ package org.apache.hadoop.yarn.service.impl.pb.client; import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.exceptions.YarnException; 
import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -58,7 +58,7 @@ public class ClientAMProtocolPBClientImpl public ClientAMProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, ClientAMProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = RPC.getProxy(ClientAMProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationClientProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationClientProtocolPBClientImpl.java index 8c8d7f17fea..70f0ed77041 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationClientProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationClientProtocolPBClientImpl.java @@ -24,7 +24,7 @@ import java.net.InetSocketAddress; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto; import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto; @@ -207,7 +207,7 @@ public class ApplicationClientProtocolPBClientImpl implements ApplicationClientP public ApplicationClientProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, ApplicationClientProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = RPC.getProxy(ApplicationClientProtocolPB.class, clientVersion, addr, conf); } diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationHistoryProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationHistoryProtocolPBClientImpl.java index ceace111ac1..c488164ceb4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationHistoryProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationHistoryProtocolPBClientImpl.java @@ -23,12 +23,11 @@ import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto; import org.apache.hadoop.security.proto.SecurityProtos.GetDelegationTokenRequestProto; import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto; -import org.apache.hadoop.yarn.api.ApplicationClientProtocolPB; import org.apache.hadoop.yarn.api.ApplicationHistoryProtocol; import org.apache.hadoop.yarn.api.ApplicationHistoryProtocolPB; import org.apache.hadoop.yarn.api.protocolrecords.CancelDelegationTokenRequest; @@ -86,7 +85,7 @@ public class ApplicationHistoryProtocolPBClientImpl implements public ApplicationHistoryProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, ApplicationHistoryProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = RPC.getProxy(ApplicationHistoryProtocolPB.class, clientVersion, addr, conf); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationMasterProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationMasterProtocolPBClientImpl.java index b4a20af6b60..4525a001024 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationMasterProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ApplicationMasterProtocolPBClientImpl.java @@ -24,7 +24,7 @@ import java.net.InetSocketAddress; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.api.ApplicationMasterProtocol; import org.apache.hadoop.yarn.api.ApplicationMasterProtocolPB; @@ -55,7 +55,8 @@ public class ApplicationMasterProtocolPBClientImpl implements ApplicationMasterP public ApplicationMasterProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { - RPC.setProtocolEngine(conf, ApplicationMasterProtocolPB.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, ApplicationMasterProtocolPB.class, + ProtobufRpcEngine2.class); proxy = (ApplicationMasterProtocolPB) RPC.getProxy(ApplicationMasterProtocolPB.class, clientVersion, addr, conf); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientSCMProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientSCMProtocolPBClientImpl.java index a1c2d5b86ef..7ee70e51f48 100644 --- 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientSCMProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ClientSCMProtocolPBClientImpl.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.api.ClientSCMProtocol; import org.apache.hadoop.yarn.api.ClientSCMProtocolPB; @@ -50,7 +50,7 @@ public class ClientSCMProtocolPBClientImpl implements ClientSCMProtocol, public ClientSCMProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, ClientSCMProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = RPC.getProxy(ClientSCMProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagementProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagementProtocolPBClientImpl.java index d5c191103c2..86fc398f252 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagementProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/ContainerManagementProtocolPBClientImpl.java @@ -21,7 +21,7 @@ package org.apache.hadoop.yarn.api.impl.pb.client; import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; -import 
org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; @@ -106,7 +106,7 @@ public class ContainerManagementProtocolPBClientImpl implements ContainerManagem public ContainerManagementProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, ContainerManagementProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); int expireIntvl = conf.getInt(NM_COMMAND_TIMEOUT, DEFAULT_COMMAND_TIMEOUT); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/CsiAdaptorProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/CsiAdaptorProtocolPBClientImpl.java index 2ab36558f44..9aff674ef32 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/CsiAdaptorProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/api/impl/pb/client/CsiAdaptorProtocolPBClientImpl.java @@ -19,7 +19,7 @@ package org.apache.hadoop.yarn.api.impl.pb.client; import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.api.CsiAdaptorPB; import org.apache.hadoop.yarn.api.CsiAdaptorProtocol; @@ -57,7 +57,7 @@ public class CsiAdaptorProtocolPBClientImpl public CsiAdaptorProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { - RPC.setProtocolEngine(conf, 
CsiAdaptorPB.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, CsiAdaptorPB.class, ProtobufRpcEngine2.class); this.proxy = RPC.getProxy(CsiAdaptorPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java index 7b48d5f8a72..17571ed03d2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/impl/pb/RpcServerFactoryPBImpl.java @@ -30,7 +30,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.security.token.SecretManager; @@ -165,7 +165,7 @@ public class RpcServerFactoryPBImpl implements RpcServerFactory { private Server createServer(Class pbProtocol, InetSocketAddress addr, Configuration conf, SecretManager secretManager, int numHandlers, BlockingService blockingService, String portRangeConfig) throws IOException { - RPC.setProtocolEngine(conf, pbProtocol, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, pbProtocol, ProtobufRpcEngine2.class); RPC.Server server = new RPC.Builder(conf).setProtocol(pbProtocol) .setInstance(blockingService).setBindAddress(addr.getHostName()) .setPort(addr.getPort()).setNumHandlers(numHandlers).setVerbose(false) diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java index 639017aa622..20729a3cc8a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceManagerAdministrationProtocolPBClientImpl.java @@ -25,7 +25,7 @@ import java.net.InetSocketAddress; import org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.ProtobufHelper; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -114,7 +114,7 @@ public class ResourceManagerAdministrationProtocolPBClientImpl implements Resour public ResourceManagerAdministrationProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, ResourceManagerAdministrationProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = (ResourceManagerAdministrationProtocolPB)RPC.getProxy( ResourceManagerAdministrationProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/SCMAdminProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/SCMAdminProtocolPBClientImpl.java 
index a1ead5b4176..fb7a750852a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/SCMAdminProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/SCMAdminProtocolPBClientImpl.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.server.api.SCMAdminProtocol; import org.apache.hadoop.yarn.server.api.SCMAdminProtocolPB; @@ -45,7 +45,7 @@ public class SCMAdminProtocolPBClientImpl implements SCMAdminProtocol, public SCMAdminProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, SCMAdminProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = RPC.getProxy(SCMAdminProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java index af75038096c..6d2bb5ddaf1 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/CollectorNodemanagerProtocolPBClientImpl.java @@ -23,7 +23,7 @@ import java.net.InetSocketAddress; import 
org.apache.hadoop.classification.InterfaceAudience.Private; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.UserGroupInformation; @@ -63,7 +63,7 @@ public class CollectorNodemanagerProtocolPBClientImpl implements public CollectorNodemanagerProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, CollectorNodemanagerProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation ugi = UserGroupInformation.getCurrentUser(); int expireIntvl = conf.getInt(NM_COMMAND_TIMEOUT, DEFAULT_COMMAND_TIMEOUT); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/DistributedSchedulingAMProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/DistributedSchedulingAMProtocolPBClientImpl.java index 4bd803f7555..f2527fc13a2 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/DistributedSchedulingAMProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/DistributedSchedulingAMProtocolPBClientImpl.java @@ -20,7 +20,7 @@ package org.apache.hadoop.yarn.server.api.impl.pb.client; import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import 
org.apache.hadoop.yarn.proto.YarnServerCommonServiceProtos; import org.apache.hadoop.yarn.server.api.DistributedSchedulingAMProtocol; @@ -63,7 +63,7 @@ public class DistributedSchedulingAMProtocolPBClientImpl implements public DistributedSchedulingAMProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, DistributedSchedulingAMProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = RPC.getProxy(DistributedSchedulingAMProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java index 650df85a01e..76622e3a144 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/ResourceTrackerPBClientImpl.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -52,7 +52,8 @@ public class ResourceTrackerPBClientImpl implements ResourceTracker, Closeable { private ResourceTrackerPB proxy; public ResourceTrackerPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { - RPC.setProtocolEngine(conf, ResourceTrackerPB.class, 
ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, ResourceTrackerPB.class, + ProtobufRpcEngine2.class); proxy = (ResourceTrackerPB)RPC.getProxy( ResourceTrackerPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/SCMUploaderProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/SCMUploaderProtocolPBClientImpl.java index 32f0bce4eb4..d484ac1ab3d 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/SCMUploaderProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/impl/pb/client/SCMUploaderProtocolPBClientImpl.java @@ -23,7 +23,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -50,7 +50,7 @@ public class SCMUploaderProtocolPBClientImpl implements public SCMUploaderProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { RPC.setProtocolEngine(conf, SCMUploaderProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); proxy = RPC.getProxy(SCMUploaderProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java 
index dedabc07d6c..def59584d0a 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java @@ -25,7 +25,7 @@ import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.io.Text; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; @@ -262,7 +262,7 @@ public class TestRPC { new DummyContainerManager(), addr, conf, null, 1); server.start(); RPC.setProtocolEngine(conf, ContainerManagementProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); ContainerManagementProtocol proxy = (ContainerManagementProtocol) rpc.getProxy(ContainerManagementProtocol.class, NetUtils.getConnectAddress(server), conf); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java index 154052208c1..124211cc7a4 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/api/impl/pb/client/LocalizationProtocolPBClientImpl.java @@ -22,7 +22,7 @@ import java.io.IOException; import java.net.InetSocketAddress; import org.apache.hadoop.conf.Configuration; 
-import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.ipc.RPCUtil; @@ -42,7 +42,8 @@ public class LocalizationProtocolPBClientImpl implements LocalizationProtocol, private LocalizationProtocolPB proxy; public LocalizationProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException { - RPC.setProtocolEngine(conf, LocalizationProtocolPB.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, LocalizationProtocolPB.class, + ProtobufRpcEngine2.class); proxy = (LocalizationProtocolPB)RPC.getProxy( LocalizationProtocolPB.class, clientVersion, addr, conf); } diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNMAuditLogger.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNMAuditLogger.java index e810046361d..3ddd05a5ee8 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNMAuditLogger.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/test/java/org/apache/hadoop/yarn/server/nodemanager/TestNMAuditLogger.java @@ -29,7 +29,7 @@ import org.apache.hadoop.thirdparty.protobuf.RpcController; import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.ClientId; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.TestRPC.TestImpl; @@ -220,7 +220,7 @@ public class TestNMAuditLogger { @Test public void 
testNMAuditLoggerWithIP() throws Exception { Configuration conf = new Configuration(); - RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, TestRpcService.class, ProtobufRpcEngine2.class); // Create server side implementation MyTestRPCServer serverImpl = new MyTestRPCServer(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java index 6ad4e3ab8de..44ffd1778eb 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/AdminService.java @@ -41,7 +41,7 @@ import org.apache.hadoop.ha.ServiceFailedException; import org.apache.hadoop.ha.proto.HAServiceProtocolProtos; import org.apache.hadoop.ha.protocolPB.HAServiceProtocolPB; import org.apache.hadoop.ha.protocolPB.HAServiceProtocolServerSideTranslatorPB; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RPC.Server; import org.apache.hadoop.ipc.StandbyException; @@ -201,7 +201,7 @@ public class AdminService extends CompositeService implements if (rm.getRMContext().isHAEnabled()) { RPC.setProtocolEngine(conf, HAServiceProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); HAServiceProtocolServerSideTranslatorPB haServiceProtocolXlator = new HAServiceProtocolServerSideTranslatorPB(this); diff --git 
a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestOpportunisticContainerAllocatorAMService.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestOpportunisticContainerAllocatorAMService.java index 5c5cb25fd62..901dc8a1430 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestOpportunisticContainerAllocatorAMService.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestOpportunisticContainerAllocatorAMService.java @@ -19,7 +19,7 @@ package org.apache.hadoop.yarn.server.resourcemanager; import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.net.NetUtils; @@ -1045,7 +1045,7 @@ public class TestOpportunisticContainerAllocatorAMService { // Verify that the OpportunisticContainerAllocatorAMSercvice can handle // vanilla ApplicationMasterProtocol clients RPC.setProtocolEngine(conf, ApplicationMasterProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); ApplicationMasterProtocolPB ampProxy = RPC.getProxy(ApplicationMasterProtocolPB .class, 1, NetUtils.getConnectAddress(server), conf); @@ -1080,7 +1080,7 @@ public class TestOpportunisticContainerAllocatorAMService { // Verify that the DistrubutedSchedulingService can handle the // DistributedSchedulingAMProtocol clients as well RPC.setProtocolEngine(conf, DistributedSchedulingAMProtocolPB.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); DistributedSchedulingAMProtocolPB dsProxy = 
RPC.getProxy(DistributedSchedulingAMProtocolPB .class, 1, NetUtils.getConnectAddress(server), conf); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAuditLogger.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAuditLogger.java index 282ff111522..e8a532d1baf 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAuditLogger.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestRMAuditLogger.java @@ -31,7 +31,7 @@ import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ipc.CallerContext; import org.apache.hadoop.ipc.ClientId; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.Server; import org.apache.hadoop.ipc.TestRPC.TestImpl; @@ -420,7 +420,7 @@ public class TestRMAuditLogger { public void testRMAuditLoggerWithIP() throws Exception { Configuration conf = new Configuration(); RPC.setProtocolEngine(conf, TestRpcService.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); // Create server side implementation MyTestRPCServer serverImpl = new MyTestRPCServer(); diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java index 
8734d9f36f9..03b68944b48 100644 --- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java +++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/security/TestClientToAMTokens.java @@ -23,7 +23,7 @@ import org.apache.hadoop.thirdparty.protobuf.RpcController; import org.apache.hadoop.thirdparty.protobuf.ServiceException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.CommonConfigurationKeysPublic; -import org.apache.hadoop.ipc.ProtobufRpcEngine; +import org.apache.hadoop.ipc.ProtobufRpcEngine2; import org.apache.hadoop.ipc.ProtocolInfo; import org.apache.hadoop.ipc.RPC; import org.apache.hadoop.ipc.RemoteException; @@ -160,7 +160,7 @@ public class TestClientToAMTokens extends ParameterizedSchedulerTestBase { Configuration conf = getConfig(); // Set RPC engine to protobuf RPC engine RPC.setProtocolEngine(conf, CustomProtocol.class, - ProtobufRpcEngine.class); + ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); BlockingService service = TestRpcServiceProtos.CustomProto @@ -194,7 +194,7 @@ public class TestClientToAMTokens extends ParameterizedSchedulerTestBase { conf.set(CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHENTICATION, "kerberos"); // Set RPC engine to protobuf RPC engine - RPC.setProtocolEngine(conf, CustomProtocol.class, ProtobufRpcEngine.class); + RPC.setProtocolEngine(conf, CustomProtocol.class, ProtobufRpcEngine2.class); UserGroupInformation.setConfiguration(conf); ContainerManagementProtocol containerManager =