diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 471027762ef..04139df5296 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -89,6 +89,9 @@ Trunk (unreleased changes)
HADOOP-7761. Improve the performance of raw comparisons. (todd)
+ HADOOP-7773. Add support for protocol buffer based RPC engine.
+ (suresh)
+
Release 0.23.0 - Unreleased
INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
index 48595ff94f9..3e2d43e67ef 100644
--- a/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
+++ b/hadoop-common-project/hadoop-common/dev-support/findbugsExcludeFile.xml
@@ -270,4 +270,8 @@
+
+ <Match>
+ <Package name="org.apache.hadoop.ipc.protobuf" />
+ </Match>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
new file mode 100644
index 00000000000..aec56a9d57a
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
@@ -0,0 +1,389 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.ipc;
+
+import java.io.Closeable;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.IOException;
+import java.lang.reflect.InvocationHandler;
+import java.lang.reflect.Method;
+import java.lang.reflect.Proxy;
+import java.net.InetSocketAddress;
+import java.util.Map;
+import java.util.concurrent.ConcurrentHashMap;
+
+import javax.net.SocketFactory;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto;
+import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto;
+import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto;
+import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.token.SecretManager;
+import org.apache.hadoop.security.token.TokenIdentifier;
+import org.apache.hadoop.util.StringUtils;
+
+import com.google.common.annotations.VisibleForTesting;
+import com.google.protobuf.BlockingService;
+import com.google.protobuf.Descriptors.MethodDescriptor;
+import com.google.protobuf.InvalidProtocolBufferException;
+import com.google.protobuf.Message;
+import com.google.protobuf.ServiceException;
+
+/**
+ * RPC Engine for protobuf-based RPCs.
+ */
+@InterfaceStability.Evolving
+public class ProtobufRpcEngine implements RpcEngine {
+ private static final Log LOG = LogFactory.getLog(ProtobufRpcEngine.class);
+
+ private static final ClientCache CLIENTS = new ClientCache();
+
+ @Override
+ @SuppressWarnings("unchecked")
+ public <T> ProtocolProxy<T> getProxy(Class<T> protocol, long clientVersion,
+ InetSocketAddress addr, UserGroupInformation ticket, Configuration conf,
+ SocketFactory factory, int rpcTimeout) throws IOException {
+
+ return new ProtocolProxy<T>(protocol, (T) Proxy.newProxyInstance(protocol
+ .getClassLoader(), new Class[] { protocol }, new Invoker(protocol,
+ addr, ticket, conf, factory, rpcTimeout)), false);
+ }
+
+ private static class Invoker implements InvocationHandler, Closeable {
+ private Map<String, Message> returnTypes = new ConcurrentHashMap<String, Message>();
+ private boolean isClosed = false;
+ private Client.ConnectionId remoteId;
+ private Client client;
+
+ public Invoker(Class<?> protocol, InetSocketAddress addr,
+ UserGroupInformation ticket, Configuration conf, SocketFactory factory,
+ int rpcTimeout) throws IOException {
+ this.remoteId = Client.ConnectionId.getConnectionId(addr, protocol,
+ ticket, rpcTimeout, conf);
+ this.client = CLIENTS.getClient(conf, factory,
+ RpcResponseWritable.class);
+ }
+
+ private HadoopRpcRequestProto constructRpcRequest(Method method,
+ Object[] params) throws ServiceException {
+ HadoopRpcRequestProto rpcRequest;
+ HadoopRpcRequestProto.Builder builder = HadoopRpcRequestProto
+ .newBuilder();
+ builder.setMethodName(method.getName());
+
+ if (params.length != 2) { // RpcController + Message
+ throw new ServiceException("Wrong number of parameters for request. Method: ["
+ + method.getName() + "]" + ", Expected: 2, Actual: "
+ + params.length);
+ }
+ if (params[1] == null) {
+ throw new ServiceException("null param while calling Method: ["
+ + method.getName() + "]");
+ }
+
+ Message param = (Message) params[1];
+ builder.setRequest(param.toByteString());
+ rpcRequest = builder.build();
+ return rpcRequest;
+ }
+
+ /**
+ * This is the client side invoker of the RPC method. It only throws
+ * ServiceException, since the invocation proxy expects only
+ * ServiceException to be thrown by the method in case of a protobuf
+ * service.
+ *
+ * ServiceException has the following causes:
+ *
+ * - Exceptions encountered in this method are thrown as
+ * RpcClientException, wrapped in RemoteException
+ * - Remote exceptions are thrown wrapped in RemoteException
+ *
+ * Note that a client calling protobuf RPC methods must handle
+ * ServiceException by getting the cause from the ServiceException. If the
+ * cause is RemoteException, then unwrap it to get the exception thrown by
+ * the server.
+ */
+ @Override
+ public Object invoke(Object proxy, Method method, Object[] args)
+ throws ServiceException {
+ long startTime = 0;
+ if (LOG.isDebugEnabled()) {
+ startTime = System.currentTimeMillis();
+ }
+
+ HadoopRpcRequestProto rpcRequest = constructRpcRequest(method, args);
+ RpcResponseWritable val = null;
+ try {
+ val = (RpcResponseWritable) client.call(
+ new RpcRequestWritable(rpcRequest), remoteId);
+ } catch (Exception e) {
+ RpcClientException ce = new RpcClientException("Client exception", e);
+ throw new ServiceException(getRemoteException(ce));
+ }
+
+ HadoopRpcResponseProto response = val.message;
+ if (LOG.isDebugEnabled()) {
+ long callTime = System.currentTimeMillis() - startTime;
+ LOG.debug("Call: " + method.getName() + " " + callTime);
+ }
+
+ // Wrap the received message
+ ResponseStatus status = response.getStatus();
+ if (status != ResponseStatus.SUCCESS) {
+ RemoteException re = new RemoteException(response.getException()
+ .getExceptionName(), response.getException().getStackTrace());
+ re.fillInStackTrace();
+ throw new ServiceException(re);
+ }
+
+ Message prototype = null;
+ try {
+ prototype = getReturnProtoType(method);
+ } catch (Exception e) {
+ throw new ServiceException(e);
+ }
+ Message returnMessage;
+ try {
+ returnMessage = prototype.newBuilderForType()
+ .mergeFrom(response.getResponse()).build();
+ } catch (InvalidProtocolBufferException e) {
+ RpcClientException ce = new RpcClientException("Client exception", e);
+ throw new ServiceException(getRemoteException(ce));
+ }
+ return returnMessage;
+ }
+
+ public void close() throws IOException {
+ if (!isClosed) {
+ isClosed = true;
+ CLIENTS.stopClient(client);
+ }
+ }
+
+ private Message getReturnProtoType(Method method) throws Exception {
+ if (returnTypes.containsKey(method.getName())) {
+ return returnTypes.get(method.getName());
+ }
+
+ Class<?> returnType = method.getReturnType();
+ Method newInstMethod = returnType.getMethod("getDefaultInstance");
+ newInstMethod.setAccessible(true);
+ Message prototype = (Message) newInstMethod.invoke(null, (Object[]) null);
+ returnTypes.put(method.getName(), prototype);
+ return prototype;
+ }
+ }
+
+ @Override
+ public Object[] call(Method method, Object[][] params,
+ InetSocketAddress[] addrs, UserGroupInformation ticket, Configuration conf) {
+ throw new UnsupportedOperationException();
+ }
+
+ /**
+ * Writable Wrapper for Protocol Buffer Requests
+ */
+ private static class RpcRequestWritable implements Writable {
+ HadoopRpcRequestProto message;
+
+ @SuppressWarnings("unused")
+ public RpcRequestWritable() {
+ }
+
+ RpcRequestWritable(HadoopRpcRequestProto message) {
+ this.message = message;
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ out.writeInt(message.toByteArray().length);
+ out.write(message.toByteArray());
+ }
+
+ @Override
+ public void readFields(DataInput in) throws IOException {
+ int length = in.readInt();
+ byte[] bytes = new byte[length];
+ in.readFully(bytes);
+ message = HadoopRpcRequestProto.parseFrom(bytes);
+ }
+ }
+
+ /**
+ * Writable Wrapper for Protocol Buffer Responses
+ */
+ private static class RpcResponseWritable implements Writable {
+ HadoopRpcResponseProto message;
+
+ @SuppressWarnings("unused")
+ public RpcResponseWritable() {
+ }
+
+ public RpcResponseWritable(HadoopRpcResponseProto message) {
+ this.message = message;
+ }
+
+ @Override
+ public void write(DataOutput out) throws IOException {
+ out.writeInt(message.toByteArray().length);
+ out.write(message.toByteArray());
+ }
+
+ @Override
+ public void readFields(DataInput in) throws IOException {
+ int length = in.readInt();
+ byte[] bytes = new byte[length];
+ in.readFully(bytes);
+ message = HadoopRpcResponseProto.parseFrom(bytes);
+ }
+ }
+
+ @VisibleForTesting
+ @InterfaceAudience.Private
+ @InterfaceStability.Unstable
+ static Client getClient(Configuration conf) {
+ return CLIENTS.getClient(conf, SocketFactory.getDefault(),
+ RpcResponseWritable.class);
+ }
+
+
+ @Override
+ public RPC.Server getServer(Class<?> protocol, Object instance,
+ String bindAddress, int port, int numHandlers, int numReaders,
+ int queueSizePerHandler, boolean verbose, Configuration conf,
+ SecretManager<? extends TokenIdentifier> secretManager)
+ throws IOException {
+ return new Server(instance, conf, bindAddress, port, numHandlers,
+ numReaders, queueSizePerHandler, verbose, secretManager);
+ }
+
+ private static RemoteException getRemoteException(Exception e) {
+ return new RemoteException(e.getClass().getName(),
+ StringUtils.stringifyException(e));
+ }
+
+ public static class Server extends RPC.Server {
+ private BlockingService service;
+ private boolean verbose;
+
+ private static String classNameBase(String className) {
+ String[] names = className.split("\\.", -1);
+ if (names == null || names.length == 0) {
+ return className;
+ }
+ return names[names.length - 1];
+ }
+
+ /**
+ * Construct an RPC server.
+ *
+ * @param instance the instance whose methods will be called
+ * @param conf the configuration to use
+ * @param bindAddress the address to bind on to listen for connection
+ * @param port the port to listen for connections on
+ * @param numHandlers the number of method handler threads to run
+ * @param verbose whether each call should be logged
+ */
+ public Server(Object instance, Configuration conf, String bindAddress,
+ int port, int numHandlers, int numReaders, int queueSizePerHandler,
+ boolean verbose, SecretManager<? extends TokenIdentifier> secretManager)
+ throws IOException {
+ super(bindAddress, port, RpcRequestWritable.class, numHandlers,
+ numReaders, queueSizePerHandler, conf, classNameBase(instance
+ .getClass().getName()), secretManager);
+ this.service = (BlockingService) instance;
+ this.verbose = verbose;
+ }
+
+ /**
+ * This is a server side method, which is invoked over RPC. On success
+ * the response carries the protobuf response payload. On failure, the
+ * exception name and the stack trace are returned in the response.
+ * See {@link HadoopRpcResponseProto}.
+ *
+ * In this method there are three types of exceptions possible and they
+ * are returned in the response as follows.
+ *
+ * - Exceptions encountered in this method are returned as
+ * {@link RpcServerException}
+ * - Exceptions thrown by the service wrapped in ServiceException: in
+ * this case the cause thrown by the service is returned in the response
+ * - Other exceptions thrown by the service are returned as is
+ */
+ @Override
+ public Writable call(String protocol, Writable writableRequest,
+ long receiveTime) throws IOException {
+ RpcRequestWritable request = (RpcRequestWritable) writableRequest;
+ HadoopRpcRequestProto rpcRequest = request.message;
+ String methodName = rpcRequest.getMethodName();
+ if (verbose)
+ LOG.info("Call: protocol=" + protocol + ", method=" + methodName);
+ MethodDescriptor methodDescriptor = service.getDescriptorForType()
+ .findMethodByName(methodName);
+ if (methodDescriptor == null) {
+ String msg = "Unknown method " + methodName + " called on " + protocol
+ + " protocol.";
+ LOG.warn(msg);
+ return handleException(new RpcServerException(msg));
+ }
+ Message prototype = service.getRequestPrototype(methodDescriptor);
+ Message param = prototype.newBuilderForType()
+ .mergeFrom(rpcRequest.getRequest()).build();
+ Message result;
+ try {
+ result = service.callBlockingMethod(methodDescriptor, null, param);
+ } catch (ServiceException e) {
+ Throwable cause = e.getCause();
+ return handleException(cause != null ? cause : e);
+ } catch (Exception e) {
+ return handleException(e);
+ }
+
+ HadoopRpcResponseProto response = constructProtoSpecificRpcSuccessResponse(result);
+ return new RpcResponseWritable(response);
+ }
+
+ private RpcResponseWritable handleException(Throwable e) {
+ HadoopRpcExceptionProto exception = HadoopRpcExceptionProto.newBuilder()
+ .setExceptionName(e.getClass().getName())
+ .setStackTrace(StringUtils.stringifyException(e)).build();
+ HadoopRpcResponseProto response = HadoopRpcResponseProto.newBuilder()
+ .setStatus(ResponseStatus.ERRROR).setException(exception).build();
+ return new RpcResponseWritable(response);
+ }
+
+ private HadoopRpcResponseProto constructProtoSpecificRpcSuccessResponse(
+ Message message) {
+ HadoopRpcResponseProto res = HadoopRpcResponseProto.newBuilder()
+ .setResponse(message.toByteString())
+ .setStatus(ResponseStatus.SUCCESS)
+ .build();
+ return res;
+ }
+ }
+}
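
Usage note for the engine above: as the Invoker javadoc states, callers of a protobuf RPC proxy only ever see ServiceException and must unwrap the cause themselves. A minimal client-side sketch, assuming a hypothetical protoc-generated blocking interface EchoService.BlockingInterface with EchoRequestProto/EchoResponseProto messages (these names are illustrative and not part of this patch):

```java
import org.apache.hadoop.ipc.RemoteException;
import com.google.protobuf.ServiceException;

public class EchoClientSketch {
  // EchoService.BlockingInterface, EchoRequestProto and EchoResponseProto are
  // hypothetical protoc-generated types, used here only for illustration.
  public static EchoResponseProto echo(EchoService.BlockingInterface proxy,
      EchoRequestProto request) throws Exception {
    try {
      // The RpcController argument is unused by this engine, so null is passed.
      return proxy.echo(null, request);
    } catch (ServiceException e) {
      Throwable cause = e.getCause();
      if (cause instanceof RemoteException) {
        // Unwrap to recover the exception raised on the server (or the
        // RpcClientException produced on the client side).
        throw ((RemoteException) cause).unwrapRemoteException();
      }
      throw e;
    }
  }
}
```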
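On the server side, the Object passed as instance to getServer is cast to com.google.protobuf.BlockingService in the Server constructor above, so the caller is expected to hand in the reflective blocking service generated by protoc. A sketch under the same hypothetical EchoService names:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import com.google.protobuf.BlockingService;

public class EchoServerSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // newReflectiveBlockingService(...) is generated by protoc for service
    // definitions; EchoService and EchoServiceImpl are hypothetical.
    BlockingService service =
        EchoService.newReflectiveBlockingService(new EchoServiceImpl());
    RPC.Server server = new ProtobufRpcEngine().getServer(
        EchoService.BlockingInterface.class, service, "0.0.0.0", 12345,
        1 /* numHandlers */, 1 /* numReaders */, 100 /* queueSizePerHandler */,
        false /* verbose */, conf, null /* secretManager */);
    server.start();
  }
}
```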
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
index d7742fee5a3..721b10ce96f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
@@ -25,10 +25,9 @@ public class RpcServerException extends RpcException {
/**
* Constructs exception with the specified detail message.
- *
- * @param messages detailed message.
+ * @param message detailed message.
*/
- RpcServerException(final String message) {
+ public RpcServerException(final String message) {
super(message);
}
@@ -36,12 +35,11 @@ public class RpcServerException extends RpcException {
* Constructs exception with the specified detail message and cause.
*
* @param message message.
- * @param cause that cause this exception
* @param cause the cause (can be retrieved by the {@link #getCause()} method).
* (A null value is permitted, and indicates that the cause
* is nonexistent or unknown.)
*/
- RpcServerException(final String message, final Throwable cause) {
+ public RpcServerException(final String message, final Throwable cause) {
super(message, cause);
}
}
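
For orientation, the generated HadoopRpcProtos.java below corresponds to roughly the following hadoop_rpc.proto, read back from the generated accessors (field numbers, labels, and enum values). This is a reconstruction for the reader, not the literal source file:

```proto
option java_package = "org.apache.hadoop.ipc.protobuf";
option java_outer_classname = "HadoopRpcProtos";
option java_generate_equals_and_hash = true;

// Wraps one RPC call: the method name plus the serialized request message.
message HadoopRpcRequestProto {
  required string methodName = 1;
  optional bytes request = 2;
}

// Carried inside an error response: exception class name and stack trace.
message HadoopRpcExceptionProto {
  optional string exceptionName = 1;
  optional string stackTrace = 2;
}

// Wraps one RPC response: a status, and either a payload or an exception.
message HadoopRpcResponseProto {
  enum ResponseStatus {
    SUCCESS = 1;
    ERRROR = 2; // sic: spelled this way in the generated enum
  }
  required ResponseStatus status = 1;
  optional bytes response = 2;
  optional HadoopRpcExceptionProto exception = 3;
}
```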
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java
new file mode 100644
index 00000000000..2086f3d86b6
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java
@@ -0,0 +1,1759 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: hadoop_rpc.proto
+
+package org.apache.hadoop.ipc.protobuf;
+
+public final class HadoopRpcProtos {
+ private HadoopRpcProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface HadoopRpcRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string methodName = 1;
+ boolean hasMethodName();
+ String getMethodName();
+
+ // optional bytes request = 2;
+ boolean hasRequest();
+ com.google.protobuf.ByteString getRequest();
+ }
+ public static final class HadoopRpcRequestProto extends
+ com.google.protobuf.GeneratedMessage
+ implements HadoopRpcRequestProtoOrBuilder {
+ // Use HadoopRpcRequestProto.newBuilder() to construct.
+ private HadoopRpcRequestProto(Builder builder) {
+ super(builder);
+ }
+ private HadoopRpcRequestProto(boolean noInit) {}
+
+ private static final HadoopRpcRequestProto defaultInstance;
+ public static HadoopRpcRequestProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public HadoopRpcRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcRequestProto_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required string methodName = 1;
+ public static final int METHODNAME_FIELD_NUMBER = 1;
+ private java.lang.Object methodName_;
+ public boolean hasMethodName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getMethodName() {
+ java.lang.Object ref = methodName_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ methodName_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getMethodNameBytes() {
+ java.lang.Object ref = methodName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ methodName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional bytes request = 2;
+ public static final int REQUEST_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString request_;
+ public boolean hasRequest() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getRequest() {
+ return request_;
+ }
+
+ private void initFields() {
+ methodName_ = "";
+ request_ = com.google.protobuf.ByteString.EMPTY;
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasMethodName()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getMethodNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, request_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getMethodNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, request_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto other = (org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto) obj;
+
+ boolean result = true;
+ result = result && (hasMethodName() == other.hasMethodName());
+ if (hasMethodName()) {
+ result = result && getMethodName()
+ .equals(other.getMethodName());
+ }
+ result = result && (hasRequest() == other.hasRequest());
+ if (hasRequest()) {
+ result = result && getRequest()
+ .equals(other.getRequest());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasMethodName()) {
+ hash = (37 * hash) + METHODNAME_FIELD_NUMBER;
+ hash = (53 * hash) + getMethodName().hashCode();
+ }
+ if (hasRequest()) {
+ hash = (37 * hash) + REQUEST_FIELD_NUMBER;
+ hash = (53 * hash) + getRequest().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcRequestProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ methodName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ request_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto build() {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto result = new org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.methodName_ = methodName_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.request_ = request_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.getDefaultInstance()) return this;
+ if (other.hasMethodName()) {
+ setMethodName(other.getMethodName());
+ }
+ if (other.hasRequest()) {
+ setRequest(other.getRequest());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasMethodName()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ methodName_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ request_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required string methodName = 1;
+ private java.lang.Object methodName_ = "";
+ public boolean hasMethodName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getMethodName() {
+ java.lang.Object ref = methodName_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ methodName_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setMethodName(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ methodName_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMethodName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ methodName_ = getDefaultInstance().getMethodName();
+ onChanged();
+ return this;
+ }
+ void setMethodName(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ methodName_ = value;
+ onChanged();
+ }
+
+ // optional bytes request = 2;
+ private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasRequest() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getRequest() {
+ return request_;
+ }
+ public Builder setRequest(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ request_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearRequest() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ request_ = getDefaultInstance().getRequest();
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:HadoopRpcRequestProto)
+ }
+
+ static {
+ defaultInstance = new HadoopRpcRequestProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:HadoopRpcRequestProto)
+ }
+
+ public interface HadoopRpcExceptionProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // optional string exceptionName = 1;
+ boolean hasExceptionName();
+ String getExceptionName();
+
+ // optional string stackTrace = 2;
+ boolean hasStackTrace();
+ String getStackTrace();
+ }
+ public static final class HadoopRpcExceptionProto extends
+ com.google.protobuf.GeneratedMessage
+ implements HadoopRpcExceptionProtoOrBuilder {
+ // Use HadoopRpcExceptionProto.newBuilder() to construct.
+ private HadoopRpcExceptionProto(Builder builder) {
+ super(builder);
+ }
+ private HadoopRpcExceptionProto(boolean noInit) {}
+
+ private static final HadoopRpcExceptionProto defaultInstance;
+ public static HadoopRpcExceptionProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public HadoopRpcExceptionProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcExceptionProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcExceptionProto_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // optional string exceptionName = 1;
+ public static final int EXCEPTIONNAME_FIELD_NUMBER = 1;
+ private java.lang.Object exceptionName_;
+ public boolean hasExceptionName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getExceptionName() {
+ java.lang.Object ref = exceptionName_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ exceptionName_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getExceptionNameBytes() {
+ java.lang.Object ref = exceptionName_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ exceptionName_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ // optional string stackTrace = 2;
+ public static final int STACKTRACE_FIELD_NUMBER = 2;
+ private java.lang.Object stackTrace_;
+ public boolean hasStackTrace() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public String getStackTrace() {
+ java.lang.Object ref = stackTrace_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ stackTrace_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getStackTraceBytes() {
+ java.lang.Object ref = stackTrace_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ stackTrace_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ exceptionName_ = "";
+ stackTrace_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getExceptionNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, getStackTraceBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getExceptionNameBytes());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, getStackTraceBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto other = (org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto) obj;
+
+ boolean result = true;
+ result = result && (hasExceptionName() == other.hasExceptionName());
+ if (hasExceptionName()) {
+ result = result && getExceptionName()
+ .equals(other.getExceptionName());
+ }
+ result = result && (hasStackTrace() == other.hasStackTrace());
+ if (hasStackTrace()) {
+ result = result && getStackTrace()
+ .equals(other.getStackTrace());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasExceptionName()) {
+ hash = (37 * hash) + EXCEPTIONNAME_FIELD_NUMBER;
+ hash = (53 * hash) + getExceptionName().hashCode();
+ }
+ if (hasStackTrace()) {
+ hash = (37 * hash) + STACKTRACE_FIELD_NUMBER;
+ hash = (53 * hash) + getStackTrace().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcExceptionProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcExceptionProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ exceptionName_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ stackTrace_ = "";
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto build() {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto result = new org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.exceptionName_ = exceptionName_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.stackTrace_ = stackTrace_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance()) return this;
+ if (other.hasExceptionName()) {
+ setExceptionName(other.getExceptionName());
+ }
+ if (other.hasStackTrace()) {
+ setStackTrace(other.getStackTrace());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ exceptionName_ = input.readBytes();
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ stackTrace_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // optional string exceptionName = 1;
+ private java.lang.Object exceptionName_ = "";
+ public boolean hasExceptionName() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getExceptionName() {
+ java.lang.Object ref = exceptionName_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ exceptionName_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setExceptionName(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ exceptionName_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearExceptionName() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ exceptionName_ = getDefaultInstance().getExceptionName();
+ onChanged();
+ return this;
+ }
+ void setExceptionName(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ exceptionName_ = value;
+ onChanged();
+ }
+
+ // optional string stackTrace = 2;
+ private java.lang.Object stackTrace_ = "";
+ public boolean hasStackTrace() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public String getStackTrace() {
+ java.lang.Object ref = stackTrace_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ stackTrace_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setStackTrace(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ stackTrace_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearStackTrace() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ stackTrace_ = getDefaultInstance().getStackTrace();
+ onChanged();
+ return this;
+ }
+ void setStackTrace(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000002;
+ stackTrace_ = value;
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:HadoopRpcExceptionProto)
+ }
+
+ static {
+ defaultInstance = new HadoopRpcExceptionProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:HadoopRpcExceptionProto)
+ }
+
+ public interface HadoopRpcResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required .HadoopRpcResponseProto.ResponseStatus status = 1;
+ boolean hasStatus();
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus getStatus();
+
+ // optional bytes response = 2;
+ boolean hasResponse();
+ com.google.protobuf.ByteString getResponse();
+
+ // optional .HadoopRpcExceptionProto exception = 3;
+ boolean hasException();
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto getException();
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder getExceptionOrBuilder();
+ }
+ public static final class HadoopRpcResponseProto extends
+ com.google.protobuf.GeneratedMessage
+ implements HadoopRpcResponseProtoOrBuilder {
+ // Use HadoopRpcResponseProto.newBuilder() to construct.
+ private HadoopRpcResponseProto(Builder builder) {
+ super(builder);
+ }
+ private HadoopRpcResponseProto(boolean noInit) {}
+
+ private static final HadoopRpcResponseProto defaultInstance;
+ public static HadoopRpcResponseProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public HadoopRpcResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcResponseProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcResponseProto_fieldAccessorTable;
+ }
+
+ public enum ResponseStatus
+ implements com.google.protobuf.ProtocolMessageEnum {
+ SUCCESS(0, 1),
+ ERRROR(1, 2),
+ ;
+
+ public static final int SUCCESS_VALUE = 1;
+ public static final int ERRROR_VALUE = 2;
+
+
+ public final int getNumber() { return value; }
+
+ public static ResponseStatus valueOf(int value) {
+ switch (value) {
+ case 1: return SUCCESS;
+ case 2: return ERRROR;
+ default: return null;
+ }
+ }
+
+ public static com.google.protobuf.Internal.EnumLiteMap<ResponseStatus>
+ internalGetValueMap() {
+ return internalValueMap;
+ }
+ private static com.google.protobuf.Internal.EnumLiteMap<ResponseStatus>
+ internalValueMap =
+ new com.google.protobuf.Internal.EnumLiteMap<ResponseStatus>() {
+ public ResponseStatus findValueByNumber(int number) {
+ return ResponseStatus.valueOf(number);
+ }
+ };
+
+ public final com.google.protobuf.Descriptors.EnumValueDescriptor
+ getValueDescriptor() {
+ return getDescriptor().getValues().get(index);
+ }
+ public final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+ public static final com.google.protobuf.Descriptors.EnumDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.getDescriptor().getEnumTypes().get(0);
+ }
+
+ private static final ResponseStatus[] VALUES = {
+ SUCCESS, ERRROR,
+ };
+
+ public static ResponseStatus valueOf(
+ com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
+ if (desc.getType() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "EnumValueDescriptor is not for this type.");
+ }
+ return VALUES[desc.getIndex()];
+ }
+
+ private final int index;
+ private final int value;
+
+ private ResponseStatus(int index, int value) {
+ this.index = index;
+ this.value = value;
+ }
+
+ // @@protoc_insertion_point(enum_scope:HadoopRpcResponseProto.ResponseStatus)
+ }
+
+ private int bitField0_;
+ // required .HadoopRpcResponseProto.ResponseStatus status = 1;
+ public static final int STATUS_FIELD_NUMBER = 1;
+ private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus status_;
+ public boolean hasStatus() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus getStatus() {
+ return status_;
+ }
+
+ // optional bytes response = 2;
+ public static final int RESPONSE_FIELD_NUMBER = 2;
+ private com.google.protobuf.ByteString response_;
+ public boolean hasResponse() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getResponse() {
+ return response_;
+ }
+
+ // optional .HadoopRpcExceptionProto exception = 3;
+ public static final int EXCEPTION_FIELD_NUMBER = 3;
+ private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto exception_;
+ public boolean hasException() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto getException() {
+ return exception_;
+ }
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder getExceptionOrBuilder() {
+ return exception_;
+ }
+
+ private void initFields() {
+ status_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS;
+ response_ = com.google.protobuf.ByteString.EMPTY;
+ exception_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasStatus()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeEnum(1, status_.getNumber());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ output.writeBytes(2, response_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ output.writeMessage(3, exception_);
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeEnumSize(1, status_.getNumber());
+ }
+ if (((bitField0_ & 0x00000002) == 0x00000002)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(2, response_);
+ }
+ if (((bitField0_ & 0x00000004) == 0x00000004)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeMessageSize(3, exception_);
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto other = (org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto) obj;
+
+ boolean result = true;
+ result = result && (hasStatus() == other.hasStatus());
+ if (hasStatus()) {
+ result = result &&
+ (getStatus() == other.getStatus());
+ }
+ result = result && (hasResponse() == other.hasResponse());
+ if (hasResponse()) {
+ result = result && getResponse()
+ .equals(other.getResponse());
+ }
+ result = result && (hasException() == other.hasException());
+ if (hasException()) {
+ result = result && getException()
+ .equals(other.getException());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasStatus()) {
+ hash = (37 * hash) + STATUS_FIELD_NUMBER;
+ hash = (53 * hash) + hashEnum(getStatus());
+ }
+ if (hasResponse()) {
+ hash = (37 * hash) + RESPONSE_FIELD_NUMBER;
+ hash = (53 * hash) + getResponse().hashCode();
+ }
+ if (hasException()) {
+ hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
+ hash = (53 * hash) + getException().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcResponseProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcResponseProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ getExceptionFieldBuilder();
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ status_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ response_ = com.google.protobuf.ByteString.EMPTY;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ if (exceptionBuilder_ == null) {
+ exception_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
+ } else {
+ exceptionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto build() {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto result = new org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.status_ = status_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.response_ = response_;
+ if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
+ to_bitField0_ |= 0x00000004;
+ }
+ if (exceptionBuilder_ == null) {
+ result.exception_ = exception_;
+ } else {
+ result.exception_ = exceptionBuilder_.build();
+ }
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.getDefaultInstance()) return this;
+ if (other.hasStatus()) {
+ setStatus(other.getStatus());
+ }
+ if (other.hasResponse()) {
+ setResponse(other.getResponse());
+ }
+ if (other.hasException()) {
+ mergeException(other.getException());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasStatus()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 8: {
+ int rawValue = input.readEnum();
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus value = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.valueOf(rawValue);
+ if (value == null) {
+ unknownFields.mergeVarintField(1, rawValue);
+ } else {
+ bitField0_ |= 0x00000001;
+ status_ = value;
+ }
+ break;
+ }
+ case 18: {
+ bitField0_ |= 0x00000002;
+ response_ = input.readBytes();
+ break;
+ }
+ case 26: {
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder subBuilder = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.newBuilder();
+ if (hasException()) {
+ subBuilder.mergeFrom(getException());
+ }
+ input.readMessage(subBuilder, extensionRegistry);
+ setException(subBuilder.buildPartial());
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required .HadoopRpcResponseProto.ResponseStatus status = 1;
+ private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus status_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS;
+ public boolean hasStatus() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus getStatus() {
+ return status_;
+ }
+ public Builder setStatus(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ status_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearStatus() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ status_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS;
+ onChanged();
+ return this;
+ }
+
+ // optional bytes response = 2;
+ private com.google.protobuf.ByteString response_ = com.google.protobuf.ByteString.EMPTY;
+ public boolean hasResponse() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ public com.google.protobuf.ByteString getResponse() {
+ return response_;
+ }
+ public Builder setResponse(com.google.protobuf.ByteString value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000002;
+ response_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearResponse() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ response_ = getDefaultInstance().getResponse();
+ onChanged();
+ return this;
+ }
+
+ // optional .HadoopRpcExceptionProto exception = 3;
+ private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto exception_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder> exceptionBuilder_;
+ public boolean hasException() {
+ return ((bitField0_ & 0x00000004) == 0x00000004);
+ }
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto getException() {
+ if (exceptionBuilder_ == null) {
+ return exception_;
+ } else {
+ return exceptionBuilder_.getMessage();
+ }
+ }
+ public Builder setException(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto value) {
+ if (exceptionBuilder_ == null) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ exception_ = value;
+ onChanged();
+ } else {
+ exceptionBuilder_.setMessage(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ public Builder setException(
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder builderForValue) {
+ if (exceptionBuilder_ == null) {
+ exception_ = builderForValue.build();
+ onChanged();
+ } else {
+ exceptionBuilder_.setMessage(builderForValue.build());
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ public Builder mergeException(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto value) {
+ if (exceptionBuilder_ == null) {
+ if (((bitField0_ & 0x00000004) == 0x00000004) &&
+ exception_ != org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance()) {
+ exception_ =
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.newBuilder(exception_).mergeFrom(value).buildPartial();
+ } else {
+ exception_ = value;
+ }
+ onChanged();
+ } else {
+ exceptionBuilder_.mergeFrom(value);
+ }
+ bitField0_ |= 0x00000004;
+ return this;
+ }
+ public Builder clearException() {
+ if (exceptionBuilder_ == null) {
+ exception_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
+ onChanged();
+ } else {
+ exceptionBuilder_.clear();
+ }
+ bitField0_ = (bitField0_ & ~0x00000004);
+ return this;
+ }
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder getExceptionBuilder() {
+ bitField0_ |= 0x00000004;
+ onChanged();
+ return getExceptionFieldBuilder().getBuilder();
+ }
+ public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder getExceptionOrBuilder() {
+ if (exceptionBuilder_ != null) {
+ return exceptionBuilder_.getMessageOrBuilder();
+ } else {
+ return exception_;
+ }
+ }
+ private com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder>
+ getExceptionFieldBuilder() {
+ if (exceptionBuilder_ == null) {
+ exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder>(
+ exception_,
+ getParentForChildren(),
+ isClean());
+ exception_ = null;
+ }
+ return exceptionBuilder_;
+ }
+
+ // @@protoc_insertion_point(builder_scope:HadoopRpcResponseProto)
+ }
+
+ static {
+ defaultInstance = new HadoopRpcResponseProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:HadoopRpcResponseProto)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_HadoopRpcRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_HadoopRpcRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_HadoopRpcExceptionProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_HadoopRpcExceptionProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_HadoopRpcResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_HadoopRpcResponseProto_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\020hadoop_rpc.proto\"<\n\025HadoopRpcRequestPr" +
+ "oto\022\022\n\nmethodName\030\001 \002(\t\022\017\n\007request\030\002 \001(\014" +
+ "\"D\n\027HadoopRpcExceptionProto\022\025\n\rexception" +
+ "Name\030\001 \001(\t\022\022\n\nstackTrace\030\002 \001(\t\"\272\001\n\026Hadoo" +
+ "pRpcResponseProto\0226\n\006status\030\001 \002(\0162&.Hado" +
+ "opRpcResponseProto.ResponseStatus\022\020\n\010res" +
+ "ponse\030\002 \001(\014\022+\n\texception\030\003 \001(\0132\030.HadoopR" +
+ "pcExceptionProto\")\n\016ResponseStatus\022\013\n\007SU" +
+ "CCESS\020\001\022\n\n\006ERRROR\020\002B4\n\036org.apache.hadoop" +
+ ".ipc.protobufB\017HadoopRpcProtos\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_HadoopRpcRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_HadoopRpcRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_HadoopRpcRequestProto_descriptor,
+ new java.lang.String[] { "MethodName", "Request", },
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.class,
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.Builder.class);
+ internal_static_HadoopRpcExceptionProto_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_HadoopRpcExceptionProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_HadoopRpcExceptionProto_descriptor,
+ new java.lang.String[] { "ExceptionName", "StackTrace", },
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.class,
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder.class);
+ internal_static_HadoopRpcResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(2);
+ internal_static_HadoopRpcResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_HadoopRpcResponseProto_descriptor,
+ new java.lang.String[] { "Status", "Response", "Exception", },
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
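
The file above is protoc-generated output for hadoop_rpc.proto (the source follows below) and is not meant to be edited by hand. As a minimal sketch of how the generated API is used — relying only on methods visible in this file plus the standard GeneratedMessage serialization helpers (toByteArray/parseFrom), with the class name being hypothetical — a response message can be round-tripped through bytes like this:

    import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto;
    import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus;

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;

    // Hypothetical illustration class, not part of this patch.
    public class ResponseRoundTripSketch {
      public static void main(String[] args) throws InvalidProtocolBufferException {
        // Build a SUCCESS response carrying an opaque payload.
        HadoopRpcResponseProto resp = HadoopRpcResponseProto.newBuilder()
            .setStatus(ResponseStatus.SUCCESS)
            .setResponse(ByteString.copyFromUtf8("payload"))
            .build();

        // Round-trip through bytes. Because status is a required field,
        // parseFrom throws InvalidProtocolBufferException on input that
        // omits it (see isInitialized()/buildParsed() above).
        HadoopRpcResponseProto parsed =
            HadoopRpcResponseProto.parseFrom(resp.toByteArray());
        System.out.println(parsed.getStatus()); // SUCCESS
      }
    }
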
diff --git a/hadoop-common-project/hadoop-common/src/proto/hadoop_rpc.proto b/hadoop-common-project/hadoop-common/src/proto/hadoop_rpc.proto
new file mode 100644
index 00000000000..d37455434d8
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/proto/hadoop_rpc.proto
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * These are the messages used by Hadoop RPC to marshal the
+ * request and response in the RPC layer.
+ */
+option java_package = "org.apache.hadoop.ipc.protobuf";
+option java_outer_classname = "HadoopRpcProtos";
+option java_generate_equals_and_hash = true;
+
+/**
+ * Message used to marshal the client request
+ * from the RPC client to the RPC server.
+ */
+message HadoopRpcRequestProto {
+ /** Name of the RPC method */
+ required string methodName = 1;
+
+ /** Bytes corresponding to the client protobuf request */
+ optional bytes request = 2;
+}
+
+/**
+ * At the RPC layer, this message is used to indicate
+ * the server side exception to the RPC client.
+ *
+ * The Hadoop RPC client throws the exception indicated
+ * by exceptionName, along with the stackTrace.
+ */
+message HadoopRpcExceptionProto {
+  /** Class name of the exception thrown from the server */
+  optional string exceptionName = 1;
+ /** Exception stack trace from the server side */
+ optional string stackTrace = 2;
+}
+
+/**
+ * This message is used to marshal the response from
+ * RPC server to the client.
+ */
+message HadoopRpcResponseProto {
+ /** Status of IPC call */
+ enum ResponseStatus {
+ SUCCESS = 1;
+ ERRROR = 2;
+ }
+
+ required ResponseStatus status = 1;
+
+ // Protobuf response payload from the server, when status is SUCCESS.
+ optional bytes response = 2;
+
+  // Exception from the server, set when status is not SUCCESS
+ optional HadoopRpcExceptionProto exception = 3;
+}
+
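
The three messages above form the envelope for every call: the client wraps its protocol-specific request bytes in HadoopRpcRequestProto, and the server answers with HadoopRpcResponseProto, filling the exception field on failure. A minimal sketch of that composition follows — the class name and field values are placeholders, and the builder setters are the ones protoc generates from the definitions above:

    import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto;
    import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto;
    import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto;
    import org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus;

    import com.google.protobuf.ByteString;

    // Hypothetical illustration class, not part of this patch.
    public class RpcEnvelopeSketch {
      public static void main(String[] args) {
        // Client -> server: method name plus the serialized
        // protocol-specific request message as opaque bytes.
        HadoopRpcRequestProto request = HadoopRpcRequestProto.newBuilder()
            .setMethodName("echo")
            .setRequest(ByteString.copyFromUtf8("<inner request proto bytes>"))
            .build();

        // Server -> client on failure: status ERRROR (the enum value as
        // declared above) plus the exception for the client to rethrow.
        HadoopRpcResponseProto errorResponse = HadoopRpcResponseProto.newBuilder()
            .setStatus(ResponseStatus.ERRROR)
            .setException(HadoopRpcExceptionProto.newBuilder()
                .setExceptionName("java.io.IOException")
                .setStackTrace("<server side stack trace>"))
            .build();

        System.out.println(request.getMethodName() + " -> "
            + errorResponse.getException().getExceptionName());
      }
    }
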
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
new file mode 100644
index 00000000000..95083ab2faf
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/TestProtoBufRpc.java
@@ -0,0 +1,123 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.ipc;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto;
+import org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto;
+import org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto;
+import org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto;
+import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto;
+import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface;
+import org.junit.Assert;
+import org.junit.Test;
+
+import com.google.protobuf.BlockingService;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+/**
+ * Tests the protocol buffer based RPC mechanism.
+ * This test depends on the type definitions in src/test/proto/test.proto
+ * and the protobuf service definition in src/test/test_rpc_service.proto.
+ */
+public class TestProtoBufRpc {
+ public final static String ADDRESS = "0.0.0.0";
+ public final static int PORT = 0;
+
+ public static class ServerImpl implements BlockingInterface {
+
+ @Override
+ public EmptyResponseProto ping(RpcController unused,
+ EmptyRequestProto request) throws ServiceException {
+ return EmptyResponseProto.newBuilder().build();
+ }
+
+ @Override
+ public EchoResponseProto echo(RpcController unused, EchoRequestProto request)
+ throws ServiceException {
+ return EchoResponseProto.newBuilder().setMessage(request.getMessage())
+ .build();
+ }
+
+ @Override
+ public EmptyResponseProto error(RpcController unused,
+ EmptyRequestProto request) throws ServiceException {
+ throw new ServiceException("error", new RpcServerException("error"));
+ }
+ }
+
+ private static RPC.Server startRPCServer(Configuration conf)
+ throws IOException {
+ // Set RPC engine to protobuf RPC engine
+ RPC.setProtocolEngine(conf, BlockingService.class, ProtobufRpcEngine.class);
+
+ // Create server side implementation
+ ServerImpl serverImpl = new ServerImpl();
+ BlockingService service = TestProtobufRpcProto
+ .newReflectiveBlockingService(serverImpl);
+
+    // Get RPC server for the server side implementation
+ RPC.Server server = RPC.getServer(BlockingService.class, service, ADDRESS,
+ PORT, conf);
+ server.start();
+ return server;
+ }
+
+ private static BlockingInterface getClient(Configuration conf,
+ InetSocketAddress addr) throws IOException {
+ // Set RPC engine to protobuf RPC engine
+ RPC.setProtocolEngine(conf, BlockingInterface.class,
+ ProtobufRpcEngine.class);
+ BlockingInterface client = RPC.getProxy(BlockingInterface.class, 0, addr,
+ conf);
+ return client;
+ }
+
+ @Test
+ public void testProtoBufRpc() throws Exception {
+ Configuration conf = new Configuration();
+ RPC.Server server = startRPCServer(conf);
+ BlockingInterface client = getClient(conf, server.getListenerAddress());
+
+ // Test ping method
+ EmptyRequestProto emptyRequest = EmptyRequestProto.newBuilder().build();
+ client.ping(null, emptyRequest);
+
+ // Test echo method
+ EchoRequestProto echoRequest = EchoRequestProto.newBuilder()
+ .setMessage("hello").build();
+ EchoResponseProto echoResponse = client.echo(null, echoRequest);
+    Assert.assertEquals("hello", echoResponse.getMessage());
+
+    // Test error method - the server side exception should surface as a
+    // RemoteException wrapped in a ServiceException
+ try {
+ client.error(null, emptyRequest);
+      Assert.fail("Expected exception was not thrown");
+ } catch (ServiceException e) {
+ RemoteException re = (RemoteException)e.getCause();
+ re.printStackTrace();
+ RpcServerException rse = (RpcServerException) re
+ .unwrapRemoteException(RpcServerException.class);
+ rse.printStackTrace();
+ }
+ }
+}
\ No newline at end of file
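
The same wiring the test performs can be used outside JUnit. A minimal standalone sketch, reusing ServerImpl from TestProtoBufRpc above and only the RPC/ProtobufRpcEngine calls this patch introduces (the class name is hypothetical):

    package org.apache.hadoop.ipc;

    import java.net.InetSocketAddress;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto;
    import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto;
    import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface;

    import com.google.protobuf.BlockingService;

    // Hypothetical illustration class, not part of this patch.
    public class ProtoBufRpcSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();

        // Server side: bind the protobuf RPC engine and export the service.
        RPC.setProtocolEngine(conf, BlockingService.class, ProtobufRpcEngine.class);
        BlockingService service = TestProtobufRpcProto
            .newReflectiveBlockingService(new TestProtoBufRpc.ServerImpl());
        RPC.Server server = RPC.getServer(BlockingService.class, service,
            "0.0.0.0", 0, conf);
        server.start();

        // Client side: a proxy that marshals each call through
        // HadoopRpcRequestProto/HadoopRpcResponseProto on the wire.
        InetSocketAddress addr = server.getListenerAddress();
        RPC.setProtocolEngine(conf, BlockingInterface.class, ProtobufRpcEngine.class);
        BlockingInterface client = RPC.getProxy(BlockingInterface.class, 0, addr, conf);

        EchoRequestProto req = EchoRequestProto.newBuilder().setMessage("hi").build();
        System.out.println(client.echo(null, req).getMessage()); // prints "hi"

        server.stop();
      }
    }
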
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java
new file mode 100644
index 00000000000..0029d26e84a
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java
@@ -0,0 +1,1525 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: test.proto
+
+package org.apache.hadoop.ipc.protobuf;
+
+public final class TestProtos {
+ private TestProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public interface EmptyRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class EmptyRequestProto extends
+ com.google.protobuf.GeneratedMessage
+ implements EmptyRequestProtoOrBuilder {
+ // Use EmptyRequestProto.newBuilder() to construct.
+ private EmptyRequestProto(Builder builder) {
+ super(builder);
+ }
+ private EmptyRequestProto(boolean noInit) {}
+
+ private static final EmptyRequestProto defaultInstance;
+ public static EmptyRequestProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public EmptyRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto build() {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:EmptyRequestProto)
+ }
+
+ static {
+ defaultInstance = new EmptyRequestProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:EmptyRequestProto)
+ }
+
+ public interface EmptyResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+ }
+ public static final class EmptyResponseProto extends
+ com.google.protobuf.GeneratedMessage
+ implements EmptyResponseProtoOrBuilder {
+ // Use EmptyResponseProto.newBuilder() to construct.
+ private EmptyResponseProto(Builder builder) {
+ super(builder);
+ }
+ private EmptyResponseProto(boolean noInit) {}
+
+ private static final EmptyResponseProto defaultInstance;
+ public static EmptyResponseProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public EmptyResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyResponseProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable;
+ }
+
+ private void initFields() {
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) obj;
+
+ boolean result = true;
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyResponseProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto build() {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto(this);
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()) return this;
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ }
+ }
+ }
+
+
+ // @@protoc_insertion_point(builder_scope:EmptyResponseProto)
+ }
+
+ static {
+ defaultInstance = new EmptyResponseProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:EmptyResponseProto)
+ }
+
+ public interface EchoRequestProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string message = 1;
+ boolean hasMessage();
+ String getMessage();
+ }
+ public static final class EchoRequestProto extends
+ com.google.protobuf.GeneratedMessage
+ implements EchoRequestProtoOrBuilder {
+ // Use EchoRequestProto.newBuilder() to construct.
+ private EchoRequestProto(Builder builder) {
+ super(builder);
+ }
+ private EchoRequestProto(boolean noInit) {}
+
+ private static final EchoRequestProto defaultInstance;
+ public static EchoRequestProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public EchoRequestProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required string message = 1;
+ public static final int MESSAGE_FIELD_NUMBER = 1;
+ private java.lang.Object message_;
+ public boolean hasMessage() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getMessage() {
+ java.lang.Object ref = message_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ message_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getMessageBytes() {
+ java.lang.Object ref = message_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ message_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ message_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasMessage()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getMessageBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getMessageBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) obj;
+
+ boolean result = true;
+ result = result && (hasMessage() == other.hasMessage());
+ if (hasMessage()) {
+ result = result && getMessage()
+ .equals(other.getMessage());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasMessage()) {
+ hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+ hash = (53 * hash) + getMessage().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder
+ implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoRequestProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ message_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto build() {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.message_ = message_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance()) return this;
+ if (other.hasMessage()) {
+ setMessage(other.getMessage());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasMessage()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ message_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required string message = 1;
+ private java.lang.Object message_ = "";
+ public boolean hasMessage() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getMessage() {
+ java.lang.Object ref = message_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ message_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setMessage(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ message_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMessage() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ message_ = getDefaultInstance().getMessage();
+ onChanged();
+ return this;
+ }
+ void setMessage(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ message_ = value;
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:EchoRequestProto)
+ }
+
+ static {
+ defaultInstance = new EchoRequestProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:EchoRequestProto)
+ }
+
+ public interface EchoResponseProtoOrBuilder
+ extends com.google.protobuf.MessageOrBuilder {
+
+ // required string message = 1;
+ boolean hasMessage();
+ String getMessage();
+ }
+ public static final class EchoResponseProto extends
+ com.google.protobuf.GeneratedMessage
+ implements EchoResponseProtoOrBuilder {
+ // Use EchoResponseProto.newBuilder() to construct.
+ private EchoResponseProto(Builder builder) {
+ super(builder);
+ }
+ private EchoResponseProto(boolean noInit) {}
+
+ private static final EchoResponseProto defaultInstance;
+ public static EchoResponseProto getDefaultInstance() {
+ return defaultInstance;
+ }
+
+ public EchoResponseProto getDefaultInstanceForType() {
+ return defaultInstance;
+ }
+
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoResponseProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable;
+ }
+
+ private int bitField0_;
+ // required string message = 1;
+ public static final int MESSAGE_FIELD_NUMBER = 1;
+ private java.lang.Object message_;
+ public boolean hasMessage() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getMessage() {
+ java.lang.Object ref = message_;
+ if (ref instanceof String) {
+ return (String) ref;
+ } else {
+ com.google.protobuf.ByteString bs =
+ (com.google.protobuf.ByteString) ref;
+ String s = bs.toStringUtf8();
+ if (com.google.protobuf.Internal.isValidUtf8(bs)) {
+ message_ = s;
+ }
+ return s;
+ }
+ }
+ private com.google.protobuf.ByteString getMessageBytes() {
+ java.lang.Object ref = message_;
+ if (ref instanceof String) {
+ com.google.protobuf.ByteString b =
+ com.google.protobuf.ByteString.copyFromUtf8((String) ref);
+ message_ = b;
+ return b;
+ } else {
+ return (com.google.protobuf.ByteString) ref;
+ }
+ }
+
+ private void initFields() {
+ message_ = "";
+ }
+ private byte memoizedIsInitialized = -1;
+ public final boolean isInitialized() {
+ byte isInitialized = memoizedIsInitialized;
+ if (isInitialized != -1) return isInitialized == 1;
+
+ if (!hasMessage()) {
+ memoizedIsInitialized = 0;
+ return false;
+ }
+ memoizedIsInitialized = 1;
+ return true;
+ }
+
+ public void writeTo(com.google.protobuf.CodedOutputStream output)
+ throws java.io.IOException {
+ getSerializedSize();
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ output.writeBytes(1, getMessageBytes());
+ }
+ getUnknownFields().writeTo(output);
+ }
+
+ private int memoizedSerializedSize = -1;
+ public int getSerializedSize() {
+ int size = memoizedSerializedSize;
+ if (size != -1) return size;
+
+ size = 0;
+ if (((bitField0_ & 0x00000001) == 0x00000001)) {
+ size += com.google.protobuf.CodedOutputStream
+ .computeBytesSize(1, getMessageBytes());
+ }
+ size += getUnknownFields().getSerializedSize();
+ memoizedSerializedSize = size;
+ return size;
+ }
+
+ private static final long serialVersionUID = 0L;
+ @java.lang.Override
+ protected java.lang.Object writeReplace()
+ throws java.io.ObjectStreamException {
+ return super.writeReplace();
+ }
+
+ @java.lang.Override
+ public boolean equals(final java.lang.Object obj) {
+ if (obj == this) {
+ return true;
+ }
+ if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)) {
+ return super.equals(obj);
+ }
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) obj;
+
+ boolean result = true;
+ result = result && (hasMessage() == other.hasMessage());
+ if (hasMessage()) {
+ result = result && getMessage()
+ .equals(other.getMessage());
+ }
+ result = result &&
+ getUnknownFields().equals(other.getUnknownFields());
+ return result;
+ }
+
+ @java.lang.Override
+ public int hashCode() {
+ int hash = 41;
+ hash = (19 * hash) + getDescriptorForType().hashCode();
+ if (hasMessage()) {
+ hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
+ hash = (53 * hash) + getMessage().hashCode();
+ }
+ hash = (29 * hash) + getUnknownFields().hashCode();
+ return hash;
+ }
+
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
+ com.google.protobuf.ByteString data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
+ com.google.protobuf.ByteString data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(byte[] data)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
+ byte[] data,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ return newBuilder().mergeFrom(data, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(
+ java.io.InputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ Builder builder = newBuilder();
+ if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
+ return builder.buildParsed();
+ } else {
+ return null;
+ }
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
+ com.google.protobuf.CodedInputStream input)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input).buildParsed();
+ }
+ public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ return newBuilder().mergeFrom(input, extensionRegistry)
+ .buildParsed();
+ }
+
+ public static Builder newBuilder() { return Builder.create(); }
+ public Builder newBuilderForType() { return newBuilder(); }
+ public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto prototype) {
+ return newBuilder().mergeFrom(prototype);
+ }
+ public Builder toBuilder() { return newBuilder(this); }
+
+ @java.lang.Override
+ protected Builder newBuilderForType(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ Builder builder = new Builder(parent);
+ return builder;
+ }
+ public static final class Builder extends
+ com.google.protobuf.GeneratedMessage.Builder<Builder>
+ implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProtoOrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoResponseProto_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable;
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ message_ = "";
+ bitField0_ = (bitField0_ & ~0x00000001);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDescriptor();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto build() {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ private org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildParsed()
+ throws com.google.protobuf.InvalidProtocolBufferException {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(
+ result).asInvalidProtocolBufferException();
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.message_ = message_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()) return this;
+ if (other.hasMessage()) {
+ setMessage(other.getMessage());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ if (!hasMessage()) {
+
+ return false;
+ }
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ com.google.protobuf.UnknownFieldSet.Builder unknownFields =
+ com.google.protobuf.UnknownFieldSet.newBuilder(
+ this.getUnknownFields());
+ while (true) {
+ int tag = input.readTag();
+ switch (tag) {
+ case 0:
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ default: {
+ if (!parseUnknownField(input, unknownFields,
+ extensionRegistry, tag)) {
+ this.setUnknownFields(unknownFields.build());
+ onChanged();
+ return this;
+ }
+ break;
+ }
+ case 10: {
+ bitField0_ |= 0x00000001;
+ message_ = input.readBytes();
+ break;
+ }
+ }
+ }
+ }
+
+ private int bitField0_;
+
+ // required string message = 1;
+ private java.lang.Object message_ = "";
+ public boolean hasMessage() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ public String getMessage() {
+ java.lang.Object ref = message_;
+ if (!(ref instanceof String)) {
+ String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
+ message_ = s;
+ return s;
+ } else {
+ return (String) ref;
+ }
+ }
+ public Builder setMessage(String value) {
+ if (value == null) {
+ throw new NullPointerException();
+ }
+ bitField0_ |= 0x00000001;
+ message_ = value;
+ onChanged();
+ return this;
+ }
+ public Builder clearMessage() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ message_ = getDefaultInstance().getMessage();
+ onChanged();
+ return this;
+ }
+ void setMessage(com.google.protobuf.ByteString value) {
+ bitField0_ |= 0x00000001;
+ message_ = value;
+ onChanged();
+ }
+
+ // @@protoc_insertion_point(builder_scope:EchoResponseProto)
+ }
+
+ static {
+ defaultInstance = new EchoResponseProto(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:EchoResponseProto)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_EmptyRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_EmptyRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_EmptyResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_EmptyResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_EchoRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_EchoRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_EchoResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_EchoResponseProto_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\ntest.proto\"\023\n\021EmptyRequestProto\"\024\n\022Emp" +
+ "tyResponseProto\"#\n\020EchoRequestProto\022\017\n\007m" +
+ "essage\030\001 \002(\t\"$\n\021EchoResponseProto\022\017\n\007mes" +
+ "sage\030\001 \002(\tB/\n\036org.apache.hadoop.ipc.prot" +
+ "obufB\nTestProtos\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_EmptyRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_EmptyRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_EmptyRequestProto_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
+ internal_static_EmptyResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_EmptyResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_EmptyResponseProto_descriptor,
+ new java.lang.String[] { },
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
+ internal_static_EchoRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(2);
+ internal_static_EchoRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_EchoRequestProto_descriptor,
+ new java.lang.String[] { "Message", },
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
+ internal_static_EchoResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(3);
+ internal_static_EchoResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_EchoResponseProto_descriptor,
+ new java.lang.String[] { "Message", },
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
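
The generated builders above enforce the "required" contract only at build time: build() throws an unchecked UninitializedMessageException when message is unset, buildParsed() converts the same failure into an InvalidProtocolBufferException on the parse path, and buildPartial() skips validation entirely. A minimal sketch of those semantics against the generated class (the class name RequiredFieldDemo is illustrative, not part of this patch):

    import com.google.protobuf.UninitializedMessageException;
    import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto;

    public class RequiredFieldDemo {
      public static void main(String[] args) {
        // With the required field set, build() validates and succeeds.
        EchoRequestProto ok = EchoRequestProto.newBuilder()
            .setMessage("hello")
            .build();
        System.out.println(ok.getMessage()); // hello

        // buildPartial() never validates; the result simply reports
        // isInitialized() == false because hasMessage() is false.
        EchoRequestProto partial = EchoRequestProto.newBuilder().buildPartial();
        System.out.println(partial.isInitialized()); // false

        // build() on an empty builder throws instead.
        try {
          EchoRequestProto.newBuilder().build();
        } catch (UninitializedMessageException e) {
          System.out.println("missing required field: " + e.getMessage());
        }
      }
    }
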
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java
new file mode 100644
index 00000000000..214b04f6b90
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java
@@ -0,0 +1,395 @@
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: test_rpc_service.proto
+
+package org.apache.hadoop.ipc.protobuf;
+
+public final class TestRpcServiceProtos {
+ private TestRpcServiceProtos() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ public static abstract class TestProtobufRpcProto
+ implements com.google.protobuf.Service {
+ protected TestProtobufRpcProto() {}
+
+ public interface Interface {
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
+
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);
+
+ public abstract void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new TestProtobufRpcProto() {
+ @java.lang.Override
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
+ impl.ping(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
+ impl.echo(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
+ impl.error(controller, request, done);
+ }
+
+ };
+ }
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
+ case 1:
+ return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
+ case 2:
+ return impl.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
+
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);
+
+ public abstract void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
+ done));
+ return;
+ case 1:
+ this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto>specializeCallback(
+ done));
+ return;
+ case 2:
+ this.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto>specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
+ }
+
+ public void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
+ }
+
+ }
+ }
+
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\026test_rpc_service.proto\032\ntest.proto2\250\001\n" +
+ "\024TestProtobufRpcProto\022/\n\004ping\022\022.EmptyReq" +
+ "uestProto\032\023.EmptyResponseProto\022-\n\004echo\022\021" +
+ ".EchoRequestProto\032\022.EchoResponseProto\0220\n" +
+ "\005error\022\022.EmptyRequestProto\032\023.EmptyRespon" +
+ "seProtoB<\n\036org.apache.hadoop.ipc.protobu" +
+ "fB\024TestRpcServiceProtos\210\001\001\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ org.apache.hadoop.ipc.protobuf.TestProtos.getDescriptor(),
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
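
The generated Stub and BlockingStub above are transport-agnostic: each simply forwards to the RpcChannel or BlockingRpcChannel it was constructed with. Before any real RPC engine is involved, that plumbing can be exercised in process by pairing newReflectiveBlockingService() with a loopback channel. A sketch under that assumption (LoopbackStubDemo and its anonymous classes are illustrative only, not test code from this patch):

    import com.google.protobuf.BlockingRpcChannel;
    import com.google.protobuf.BlockingService;
    import com.google.protobuf.Descriptors.MethodDescriptor;
    import com.google.protobuf.Message;
    import com.google.protobuf.RpcController;
    import com.google.protobuf.ServiceException;
    import org.apache.hadoop.ipc.protobuf.TestProtos.*;
    import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto;
    import org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto.BlockingInterface;

    public class LoopbackStubDemo {
      public static void main(String[] args) throws ServiceException {
        // A trivial implementation of the generated blocking interface.
        BlockingInterface impl = new BlockingInterface() {
          public EmptyResponseProto ping(RpcController c, EmptyRequestProto req) {
            return EmptyResponseProto.getDefaultInstance();
          }
          public EchoResponseProto echo(RpcController c, EchoRequestProto req) {
            return EchoResponseProto.newBuilder().setMessage(req.getMessage()).build();
          }
          public EmptyResponseProto error(RpcController c, EmptyRequestProto req)
              throws ServiceException {
            throw new ServiceException("error() always fails");
          }
        };

        // Wrap it in the generated reflective dispatcher...
        final BlockingService service =
            TestProtobufRpcProto.newReflectiveBlockingService(impl);

        // ...and expose it through a channel that short-circuits in process.
        BlockingRpcChannel loopback = new BlockingRpcChannel() {
          public Message callBlockingMethod(MethodDescriptor method,
              RpcController controller, Message request, Message responsePrototype)
              throws ServiceException {
            return service.callBlockingMethod(method, controller, request);
          }
        };

        BlockingInterface client = TestProtobufRpcProto.newBlockingStub(loopback);
        EchoResponseProto resp = client.echo(null,
            EchoRequestProto.newBuilder().setMessage("hello").build());
        System.out.println(resp.getMessage()); // hello
      }
    }
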
diff --git a/hadoop-common-project/hadoop-common/src/test/proto/test.proto b/hadoop-common-project/hadoop-common/src/test/proto/test.proto
new file mode 100644
index 00000000000..71f4427052c
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/proto/test.proto
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+option java_package = "org.apache.hadoop.ipc.protobuf";
+option java_outer_classname = "TestProtos";
+option java_generate_equals_and_hash = true;
+
+message EmptyRequestProto {
+}
+
+message EmptyResponseProto {
+}
+
+message EchoRequestProto {
+ required string message = 1;
+}
+
+message EchoResponseProto {
+ required string message = 1;
+}
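
The four messages are deliberately minimal: two empty markers for calls that carry no payload, and two one-field carriers for the echo round trip. Because java_generate_equals_and_hash is enabled, the generated classes also compare by field value. A small round-trip sketch against the generated TestProtos classes (EchoRoundTrip is an illustrative name):

    import org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto;

    public class EchoRoundTrip {
      public static void main(String[] args) throws Exception {
        EchoRequestProto request = EchoRequestProto.newBuilder()
            .setMessage("ping")
            .build();

        // Serialize to the protobuf wire format and parse it back.
        byte[] wire = request.toByteArray();
        EchoRequestProto parsed = EchoRequestProto.parseFrom(wire);

        System.out.println(parsed.getMessage());    // ping
        System.out.println(parsed.equals(request)); // true, by field value
      }
    }
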
diff --git a/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto b/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
new file mode 100644
index 00000000000..14ba0ae170d
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/proto/test_rpc_service.proto
@@ -0,0 +1,33 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+option java_package = "org.apache.hadoop.ipc.protobuf";
+option java_outer_classname = "TestRpcServiceProtos";
+option java_generic_services = true;
+option java_generate_equals_and_hash = true;
+
+import "test.proto";
+
+
+/**
+ * A protobuf service for use in tests
+ */
+service TestProtobufRpcProto {
+ rpc ping(EmptyRequestProto) returns (EmptyResponseProto);
+ rpc echo(EchoRequestProto) returns (EchoResponseProto);
+ rpc error(EmptyRequestProto) returns (EmptyResponseProto);
+}
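
Because java_generic_services is on, protoc emits the descriptor-driven dispatch shown in TestRpcServiceProtos above, which is exactly the hook a server-side engine needs: resolve a method by name, rehydrate the request from bytes using the per-method prototype, and invoke the generated switch. A hedged sketch of that shape of dispatch (DescriptorDispatch is illustrative; this is not the ProtobufRpcEngine's actual server code):

    import com.google.protobuf.BlockingService;
    import com.google.protobuf.Descriptors.MethodDescriptor;
    import com.google.protobuf.Message;
    import com.google.protobuf.ServiceException;

    public class DescriptorDispatch {
      // Dispatch a request to a BlockingService given only the method name
      // and the serialized request bytes - the work a protobuf RPC server
      // performs for the service defined above.
      static Message dispatch(BlockingService service, String methodName,
          byte[] requestBytes) throws Exception {
        MethodDescriptor method =
            service.getDescriptorForType().findMethodByName(methodName);
        if (method == null) {
          throw new ServiceException("Unknown method: " + methodName);
        }
        // Rehydrate the request using the per-method prototype...
        Message request = service.getRequestPrototype(method)
            .newBuilderForType().mergeFrom(requestBytes).build();
        // ...and invoke the generated switch in callBlockingMethod.
        return service.callBlockingMethod(method, null, request);
      }
    }
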