diff --git a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
index 87569e24cfc..1994fc23e41 100644
--- a/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
+++ b/hbase-protocol/src/main/java/org/apache/hadoop/hbase/protobuf/generated/ClientProtos.java
@@ -15934,2306 +15934,6 @@ public final class ClientProtos {
// @@protoc_insertion_point(class_scope:BulkLoadHFileResponse)
}
- public interface ExecOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required bytes row = 1;
- boolean hasRow();
- com.google.protobuf.ByteString getRow();
-
- // required string protocolName = 2;
- boolean hasProtocolName();
- String getProtocolName();
-
- // required string methodName = 3;
- boolean hasMethodName();
- String getMethodName();
-
- // repeated .NameStringPair property = 4;
- java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>
- getPropertyList();
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index);
- int getPropertyCount();
- java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
- getPropertyOrBuilderList();
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder(
- int index);
-
- // repeated .NameBytesPair parameter = 5;
- java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>
- getParameterList();
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index);
- int getParameterCount();
- java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
- getParameterOrBuilderList();
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder(
- int index);
- }
- public static final class Exec extends
- com.google.protobuf.GeneratedMessage
- implements ExecOrBuilder {
- // Use Exec.newBuilder() to construct.
- private Exec(Builder builder) {
- super(builder);
- }
- private Exec(boolean noInit) {}
-
- private static final Exec defaultInstance;
- public static Exec getDefaultInstance() {
- return defaultInstance;
- }
-
- public Exec getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable;
- }
-
- private int bitField0_;
- // required bytes row = 1;
- public static final int ROW_FIELD_NUMBER = 1;
- private com.google.protobuf.ByteString row_;
- public boolean hasRow() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public com.google.protobuf.ByteString getRow() {
- return row_;
- }
-
- // required string protocolName = 2;
- public static final int PROTOCOLNAME_FIELD_NUMBER = 2;
- private java.lang.Object protocolName_;
- public boolean hasProtocolName() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public String getProtocolName() {
- java.lang.Object ref = protocolName_;
- if (ref instanceof String) {
- return (String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
- protocolName_ = s;
- }
- return s;
- }
- }
- private com.google.protobuf.ByteString getProtocolNameBytes() {
- java.lang.Object ref = protocolName_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
- protocolName_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // required string methodName = 3;
- public static final int METHODNAME_FIELD_NUMBER = 3;
- private java.lang.Object methodName_;
- public boolean hasMethodName() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- public String getMethodName() {
- java.lang.Object ref = methodName_;
- if (ref instanceof String) {
- return (String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
- methodName_ = s;
- }
- return s;
- }
- }
- private com.google.protobuf.ByteString getMethodNameBytes() {
- java.lang.Object ref = methodName_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
- methodName_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // repeated .NameStringPair property = 4;
- public static final int PROPERTY_FIELD_NUMBER = 4;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> property_;
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getPropertyList() {
- return property_;
- }
- public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
- getPropertyOrBuilderList() {
- return property_;
- }
- public int getPropertyCount() {
- return property_.size();
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) {
- return property_.get(index);
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder(
- int index) {
- return property_.get(index);
- }
-
- // repeated .NameBytesPair parameter = 5;
- public static final int PARAMETER_FIELD_NUMBER = 5;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> parameter_;
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getParameterList() {
- return parameter_;
- }
- public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
- getParameterOrBuilderList() {
- return parameter_;
- }
- public int getParameterCount() {
- return parameter_.size();
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) {
- return parameter_.get(index);
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder(
- int index) {
- return parameter_.get(index);
- }
-
- private void initFields() {
- row_ = com.google.protobuf.ByteString.EMPTY;
- protocolName_ = "";
- methodName_ = "";
- property_ = java.util.Collections.emptyList();
- parameter_ = java.util.Collections.emptyList();
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasRow()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!hasProtocolName()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!hasMethodName()) {
- memoizedIsInitialized = 0;
- return false;
- }
- for (int i = 0; i < getPropertyCount(); i++) {
- if (!getProperty(i).isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- }
- for (int i = 0; i < getParameterCount(); i++) {
- if (!getParameter(i).isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, row_);
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, getProtocolNameBytes());
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeBytes(3, getMethodNameBytes());
- }
- for (int i = 0; i < property_.size(); i++) {
- output.writeMessage(4, property_.get(i));
- }
- for (int i = 0; i < parameter_.size(); i++) {
- output.writeMessage(5, parameter_.get(i));
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, row_);
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, getProtocolNameBytes());
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(3, getMethodNameBytes());
- }
- for (int i = 0; i < property_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(4, property_.get(i));
- }
- for (int i = 0; i < parameter_.size(); i++) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(5, parameter_.get(i));
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)) {
- return super.equals(obj);
- }
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) obj;
-
- boolean result = true;
- result = result && (hasRow() == other.hasRow());
- if (hasRow()) {
- result = result && getRow()
- .equals(other.getRow());
- }
- result = result && (hasProtocolName() == other.hasProtocolName());
- if (hasProtocolName()) {
- result = result && getProtocolName()
- .equals(other.getProtocolName());
- }
- result = result && (hasMethodName() == other.hasMethodName());
- if (hasMethodName()) {
- result = result && getMethodName()
- .equals(other.getMethodName());
- }
- result = result && getPropertyList()
- .equals(other.getPropertyList());
- result = result && getParameterList()
- .equals(other.getParameterList());
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasRow()) {
- hash = (37 * hash) + ROW_FIELD_NUMBER;
- hash = (53 * hash) + getRow().hashCode();
- }
- if (hasProtocolName()) {
- hash = (37 * hash) + PROTOCOLNAME_FIELD_NUMBER;
- hash = (53 * hash) + getProtocolName().hashCode();
- }
- if (hasMethodName()) {
- hash = (37 * hash) + METHODNAME_FIELD_NUMBER;
- hash = (53 * hash) + getMethodName().hashCode();
- }
- if (getPropertyCount() > 0) {
- hash = (37 * hash) + PROPERTY_FIELD_NUMBER;
- hash = (53 * hash) + getPropertyList().hashCode();
- }
- if (getParameterCount() > 0) {
- hash = (37 * hash) + PARAMETER_FIELD_NUMBER;
- hash = (53 * hash) + getParameterList().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_Exec_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- getPropertyFieldBuilder();
- getParameterFieldBuilder();
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- row_ = com.google.protobuf.ByteString.EMPTY;
- bitField0_ = (bitField0_ & ~0x00000001);
- protocolName_ = "";
- bitField0_ = (bitField0_ & ~0x00000002);
- methodName_ = "";
- bitField0_ = (bitField0_ & ~0x00000004);
- if (propertyBuilder_ == null) {
- property_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000008);
- } else {
- propertyBuilder_.clear();
- }
- if (parameterBuilder_ == null) {
- parameter_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000010);
- } else {
- parameterBuilder_.clear();
- }
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDescriptor();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec build() {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec buildPartial() {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.row_ = row_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
- }
- result.protocolName_ = protocolName_;
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
- to_bitField0_ |= 0x00000004;
- }
- result.methodName_ = methodName_;
- if (propertyBuilder_ == null) {
- if (((bitField0_ & 0x00000008) == 0x00000008)) {
- property_ = java.util.Collections.unmodifiableList(property_);
- bitField0_ = (bitField0_ & ~0x00000008);
- }
- result.property_ = property_;
- } else {
- result.property_ = propertyBuilder_.build();
- }
- if (parameterBuilder_ == null) {
- if (((bitField0_ & 0x00000010) == 0x00000010)) {
- parameter_ = java.util.Collections.unmodifiableList(parameter_);
- bitField0_ = (bitField0_ & ~0x00000010);
- }
- result.parameter_ = parameter_;
- } else {
- result.parameter_ = parameterBuilder_.build();
- }
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) return this;
- if (other.hasRow()) {
- setRow(other.getRow());
- }
- if (other.hasProtocolName()) {
- setProtocolName(other.getProtocolName());
- }
- if (other.hasMethodName()) {
- setMethodName(other.getMethodName());
- }
- if (propertyBuilder_ == null) {
- if (!other.property_.isEmpty()) {
- if (property_.isEmpty()) {
- property_ = other.property_;
- bitField0_ = (bitField0_ & ~0x00000008);
- } else {
- ensurePropertyIsMutable();
- property_.addAll(other.property_);
- }
- onChanged();
- }
- } else {
- if (!other.property_.isEmpty()) {
- if (propertyBuilder_.isEmpty()) {
- propertyBuilder_.dispose();
- propertyBuilder_ = null;
- property_ = other.property_;
- bitField0_ = (bitField0_ & ~0x00000008);
- propertyBuilder_ =
- com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
- getPropertyFieldBuilder() : null;
- } else {
- propertyBuilder_.addAllMessages(other.property_);
- }
- }
- }
- if (parameterBuilder_ == null) {
- if (!other.parameter_.isEmpty()) {
- if (parameter_.isEmpty()) {
- parameter_ = other.parameter_;
- bitField0_ = (bitField0_ & ~0x00000010);
- } else {
- ensureParameterIsMutable();
- parameter_.addAll(other.parameter_);
- }
- onChanged();
- }
- } else {
- if (!other.parameter_.isEmpty()) {
- if (parameterBuilder_.isEmpty()) {
- parameterBuilder_.dispose();
- parameterBuilder_ = null;
- parameter_ = other.parameter_;
- bitField0_ = (bitField0_ & ~0x00000010);
- parameterBuilder_ =
- com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders ?
- getParameterFieldBuilder() : null;
- } else {
- parameterBuilder_.addAllMessages(other.parameter_);
- }
- }
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasRow()) {
-
- return false;
- }
- if (!hasProtocolName()) {
-
- return false;
- }
- if (!hasMethodName()) {
-
- return false;
- }
- for (int i = 0; i < getPropertyCount(); i++) {
- if (!getProperty(i).isInitialized()) {
-
- return false;
- }
- }
- for (int i = 0; i < getParameterCount(); i++) {
- if (!getParameter(i).isInitialized()) {
-
- return false;
- }
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- row_ = input.readBytes();
- break;
- }
- case 18: {
- bitField0_ |= 0x00000002;
- protocolName_ = input.readBytes();
- break;
- }
- case 26: {
- bitField0_ |= 0x00000004;
- methodName_ = input.readBytes();
- break;
- }
- case 34: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.newBuilder();
- input.readMessage(subBuilder, extensionRegistry);
- addProperty(subBuilder.buildPartial());
- break;
- }
- case 42: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder();
- input.readMessage(subBuilder, extensionRegistry);
- addParameter(subBuilder.buildPartial());
- break;
- }
- }
- }
- }
-
- private int bitField0_;
-
- // required bytes row = 1;
- private com.google.protobuf.ByteString row_ = com.google.protobuf.ByteString.EMPTY;
- public boolean hasRow() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public com.google.protobuf.ByteString getRow() {
- return row_;
- }
- public Builder setRow(com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- row_ = value;
- onChanged();
- return this;
- }
- public Builder clearRow() {
- bitField0_ = (bitField0_ & ~0x00000001);
- row_ = getDefaultInstance().getRow();
- onChanged();
- return this;
- }
-
- // required string protocolName = 2;
- private java.lang.Object protocolName_ = "";
- public boolean hasProtocolName() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public String getProtocolName() {
- java.lang.Object ref = protocolName_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
- protocolName_ = s;
- return s;
- } else {
- return (String) ref;
- }
- }
- public Builder setProtocolName(String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- protocolName_ = value;
- onChanged();
- return this;
- }
- public Builder clearProtocolName() {
- bitField0_ = (bitField0_ & ~0x00000002);
- protocolName_ = getDefaultInstance().getProtocolName();
- onChanged();
- return this;
- }
- void setProtocolName(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000002;
- protocolName_ = value;
- onChanged();
- }
-
- // required string methodName = 3;
- private java.lang.Object methodName_ = "";
- public boolean hasMethodName() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- public String getMethodName() {
- java.lang.Object ref = methodName_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
- methodName_ = s;
- return s;
- } else {
- return (String) ref;
- }
- }
- public Builder setMethodName(String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000004;
- methodName_ = value;
- onChanged();
- return this;
- }
- public Builder clearMethodName() {
- bitField0_ = (bitField0_ & ~0x00000004);
- methodName_ = getDefaultInstance().getMethodName();
- onChanged();
- return this;
- }
- void setMethodName(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000004;
- methodName_ = value;
- onChanged();
- }
-
- // repeated .NameStringPair property = 4;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> property_ =
- java.util.Collections.emptyList();
- private void ensurePropertyIsMutable() {
- if (!((bitField0_ & 0x00000008) == 0x00000008)) {
- property_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair>(property_);
- bitField0_ |= 0x00000008;
- }
- }
-
- private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder> propertyBuilder_;
-
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> getPropertyList() {
- if (propertyBuilder_ == null) {
- return java.util.Collections.unmodifiableList(property_);
- } else {
- return propertyBuilder_.getMessageList();
- }
- }
- public int getPropertyCount() {
- if (propertyBuilder_ == null) {
- return property_.size();
- } else {
- return propertyBuilder_.getCount();
- }
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair getProperty(int index) {
- if (propertyBuilder_ == null) {
- return property_.get(index);
- } else {
- return propertyBuilder_.getMessage(index);
- }
- }
- public Builder setProperty(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
- if (propertyBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensurePropertyIsMutable();
- property_.set(index, value);
- onChanged();
- } else {
- propertyBuilder_.setMessage(index, value);
- }
- return this;
- }
- public Builder setProperty(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
- if (propertyBuilder_ == null) {
- ensurePropertyIsMutable();
- property_.set(index, builderForValue.build());
- onChanged();
- } else {
- propertyBuilder_.setMessage(index, builderForValue.build());
- }
- return this;
- }
- public Builder addProperty(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
- if (propertyBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensurePropertyIsMutable();
- property_.add(value);
- onChanged();
- } else {
- propertyBuilder_.addMessage(value);
- }
- return this;
- }
- public Builder addProperty(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair value) {
- if (propertyBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensurePropertyIsMutable();
- property_.add(index, value);
- onChanged();
- } else {
- propertyBuilder_.addMessage(index, value);
- }
- return this;
- }
- public Builder addProperty(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
- if (propertyBuilder_ == null) {
- ensurePropertyIsMutable();
- property_.add(builderForValue.build());
- onChanged();
- } else {
- propertyBuilder_.addMessage(builderForValue.build());
- }
- return this;
- }
- public Builder addProperty(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder builderForValue) {
- if (propertyBuilder_ == null) {
- ensurePropertyIsMutable();
- property_.add(index, builderForValue.build());
- onChanged();
- } else {
- propertyBuilder_.addMessage(index, builderForValue.build());
- }
- return this;
- }
- public Builder addAllProperty(
- java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair> values) {
- if (propertyBuilder_ == null) {
- ensurePropertyIsMutable();
- super.addAll(values, property_);
- onChanged();
- } else {
- propertyBuilder_.addAllMessages(values);
- }
- return this;
- }
- public Builder clearProperty() {
- if (propertyBuilder_ == null) {
- property_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000008);
- onChanged();
- } else {
- propertyBuilder_.clear();
- }
- return this;
- }
- public Builder removeProperty(int index) {
- if (propertyBuilder_ == null) {
- ensurePropertyIsMutable();
- property_.remove(index);
- onChanged();
- } else {
- propertyBuilder_.remove(index);
- }
- return this;
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder getPropertyBuilder(
- int index) {
- return getPropertyFieldBuilder().getBuilder(index);
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder getPropertyOrBuilder(
- int index) {
- if (propertyBuilder_ == null) {
- return property_.get(index); } else {
- return propertyBuilder_.getMessageOrBuilder(index);
- }
- }
- public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
- getPropertyOrBuilderList() {
- if (propertyBuilder_ != null) {
- return propertyBuilder_.getMessageOrBuilderList();
- } else {
- return java.util.Collections.unmodifiableList(property_);
- }
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder() {
- return getPropertyFieldBuilder().addBuilder(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder addPropertyBuilder(
- int index) {
- return getPropertyFieldBuilder().addBuilder(
- index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.getDefaultInstance());
- }
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder>
- getPropertyBuilderList() {
- return getPropertyFieldBuilder().getBuilderList();
- }
- private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>
- getPropertyFieldBuilder() {
- if (propertyBuilder_ == null) {
- propertyBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPairOrBuilder>(
- property_,
- ((bitField0_ & 0x00000008) == 0x00000008),
- getParentForChildren(),
- isClean());
- property_ = null;
- }
- return propertyBuilder_;
- }
-
- // repeated .NameBytesPair parameter = 5;
- private java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> parameter_ =
- java.util.Collections.emptyList();
- private void ensureParameterIsMutable() {
- if (!((bitField0_ & 0x00000010) == 0x00000010)) {
- parameter_ = new java.util.ArrayList<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair>(parameter_);
- bitField0_ |= 0x00000010;
- }
- }
-
- private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> parameterBuilder_;
-
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> getParameterList() {
- if (parameterBuilder_ == null) {
- return java.util.Collections.unmodifiableList(parameter_);
- } else {
- return parameterBuilder_.getMessageList();
- }
- }
- public int getParameterCount() {
- if (parameterBuilder_ == null) {
- return parameter_.size();
- } else {
- return parameterBuilder_.getCount();
- }
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getParameter(int index) {
- if (parameterBuilder_ == null) {
- return parameter_.get(index);
- } else {
- return parameterBuilder_.getMessage(index);
- }
- }
- public Builder setParameter(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
- if (parameterBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensureParameterIsMutable();
- parameter_.set(index, value);
- onChanged();
- } else {
- parameterBuilder_.setMessage(index, value);
- }
- return this;
- }
- public Builder setParameter(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
- if (parameterBuilder_ == null) {
- ensureParameterIsMutable();
- parameter_.set(index, builderForValue.build());
- onChanged();
- } else {
- parameterBuilder_.setMessage(index, builderForValue.build());
- }
- return this;
- }
- public Builder addParameter(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
- if (parameterBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensureParameterIsMutable();
- parameter_.add(value);
- onChanged();
- } else {
- parameterBuilder_.addMessage(value);
- }
- return this;
- }
- public Builder addParameter(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
- if (parameterBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- ensureParameterIsMutable();
- parameter_.add(index, value);
- onChanged();
- } else {
- parameterBuilder_.addMessage(index, value);
- }
- return this;
- }
- public Builder addParameter(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
- if (parameterBuilder_ == null) {
- ensureParameterIsMutable();
- parameter_.add(builderForValue.build());
- onChanged();
- } else {
- parameterBuilder_.addMessage(builderForValue.build());
- }
- return this;
- }
- public Builder addParameter(
- int index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
- if (parameterBuilder_ == null) {
- ensureParameterIsMutable();
- parameter_.add(index, builderForValue.build());
- onChanged();
- } else {
- parameterBuilder_.addMessage(index, builderForValue.build());
- }
- return this;
- }
- public Builder addAllParameter(
- java.lang.Iterable<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair> values) {
- if (parameterBuilder_ == null) {
- ensureParameterIsMutable();
- super.addAll(values, parameter_);
- onChanged();
- } else {
- parameterBuilder_.addAllMessages(values);
- }
- return this;
- }
- public Builder clearParameter() {
- if (parameterBuilder_ == null) {
- parameter_ = java.util.Collections.emptyList();
- bitField0_ = (bitField0_ & ~0x00000010);
- onChanged();
- } else {
- parameterBuilder_.clear();
- }
- return this;
- }
- public Builder removeParameter(int index) {
- if (parameterBuilder_ == null) {
- ensureParameterIsMutable();
- parameter_.remove(index);
- onChanged();
- } else {
- parameterBuilder_.remove(index);
- }
- return this;
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getParameterBuilder(
- int index) {
- return getParameterFieldBuilder().getBuilder(index);
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getParameterOrBuilder(
- int index) {
- if (parameterBuilder_ == null) {
- return parameter_.get(index); } else {
- return parameterBuilder_.getMessageOrBuilder(index);
- }
- }
- public java.util.List<? extends org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
- getParameterOrBuilderList() {
- if (parameterBuilder_ != null) {
- return parameterBuilder_.getMessageOrBuilderList();
- } else {
- return java.util.Collections.unmodifiableList(parameter_);
- }
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder() {
- return getParameterFieldBuilder().addBuilder(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder addParameterBuilder(
- int index) {
- return getParameterFieldBuilder().addBuilder(
- index, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance());
- }
- public java.util.List<org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder>
- getParameterBuilderList() {
- return getParameterFieldBuilder().getBuilderList();
- }
- private com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
- getParameterFieldBuilder() {
- if (parameterBuilder_ == null) {
- parameterBuilder_ = new com.google.protobuf.RepeatedFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
- parameter_,
- ((bitField0_ & 0x00000010) == 0x00000010),
- getParentForChildren(),
- isClean());
- parameter_ = null;
- }
- return parameterBuilder_;
- }
-
- // @@protoc_insertion_point(builder_scope:Exec)
- }
-
- static {
- defaultInstance = new Exec(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:Exec)
- }
-
- public interface ExecCoprocessorRequestOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required .RegionSpecifier region = 1;
- boolean hasRegion();
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion();
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder();
-
- // required .Exec call = 2;
- boolean hasCall();
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall();
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder();
- }
- public static final class ExecCoprocessorRequest extends
- com.google.protobuf.GeneratedMessage
- implements ExecCoprocessorRequestOrBuilder {
- // Use ExecCoprocessorRequest.newBuilder() to construct.
- private ExecCoprocessorRequest(Builder builder) {
- super(builder);
- }
- private ExecCoprocessorRequest(boolean noInit) {}
-
- private static final ExecCoprocessorRequest defaultInstance;
- public static ExecCoprocessorRequest getDefaultInstance() {
- return defaultInstance;
- }
-
- public ExecCoprocessorRequest getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable;
- }
-
- private int bitField0_;
- // required .RegionSpecifier region = 1;
- public static final int REGION_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_;
- public boolean hasRegion() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
- return region_;
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
- return region_;
- }
-
- // required .Exec call = 2;
- public static final int CALL_FIELD_NUMBER = 2;
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_;
- public boolean hasCall() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() {
- return call_;
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() {
- return call_;
- }
-
- private void initFields() {
- region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
- call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasRegion()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!hasCall()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!getRegion().isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!getCall().isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(1, region_);
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeMessage(2, call_);
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, region_);
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(2, call_);
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)) {
- return super.equals(obj);
- }
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) obj;
-
- boolean result = true;
- result = result && (hasRegion() == other.hasRegion());
- if (hasRegion()) {
- result = result && getRegion()
- .equals(other.getRegion());
- }
- result = result && (hasCall() == other.hasCall());
- if (hasCall()) {
- result = result && getCall()
- .equals(other.getCall());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasRegion()) {
- hash = (37 * hash) + REGION_FIELD_NUMBER;
- hash = (53 * hash) + getRegion().hashCode();
- }
- if (hasCall()) {
- hash = (37 * hash) + CALL_FIELD_NUMBER;
- hash = (53 * hash) + getCall().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequestOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorRequest_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- getRegionFieldBuilder();
- getCallFieldBuilder();
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- if (regionBuilder_ == null) {
- region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
- } else {
- regionBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000001);
- if (callBuilder_ == null) {
- call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
- } else {
- callBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000002);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDescriptor();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest build() {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest buildPartial() {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- if (regionBuilder_ == null) {
- result.region_ = region_;
- } else {
- result.region_ = regionBuilder_.build();
- }
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
- }
- if (callBuilder_ == null) {
- result.call_ = call_;
- } else {
- result.call_ = callBuilder_.build();
- }
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance()) return this;
- if (other.hasRegion()) {
- mergeRegion(other.getRegion());
- }
- if (other.hasCall()) {
- mergeCall(other.getCall());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasRegion()) {
-
- return false;
- }
- if (!hasCall()) {
-
- return false;
- }
- if (!getRegion().isInitialized()) {
-
- return false;
- }
- if (!getCall().isInitialized()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder();
- if (hasRegion()) {
- subBuilder.mergeFrom(getRegion());
- }
- input.readMessage(subBuilder, extensionRegistry);
- setRegion(subBuilder.buildPartial());
- break;
- }
- case 18: {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder();
- if (hasCall()) {
- subBuilder.mergeFrom(getCall());
- }
- input.readMessage(subBuilder, extensionRegistry);
- setCall(subBuilder.buildPartial());
- break;
- }
- }
- }
- }
-
- private int bitField0_;
-
- // required .RegionSpecifier region = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder> regionBuilder_;
- public boolean hasRegion() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier getRegion() {
- if (regionBuilder_ == null) {
- return region_;
- } else {
- return regionBuilder_.getMessage();
- }
- }
- public Builder setRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
- if (regionBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- region_ = value;
- onChanged();
- } else {
- regionBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- public Builder setRegion(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder builderForValue) {
- if (regionBuilder_ == null) {
- region_ = builderForValue.build();
- onChanged();
- } else {
- regionBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- public Builder mergeRegion(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier value) {
- if (regionBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001) &&
- region_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance()) {
- region_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.newBuilder(region_).mergeFrom(value).buildPartial();
- } else {
- region_ = value;
- }
- onChanged();
- } else {
- regionBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- public Builder clearRegion() {
- if (regionBuilder_ == null) {
- region_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.getDefaultInstance();
- onChanged();
- } else {
- regionBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000001);
- return this;
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder getRegionBuilder() {
- bitField0_ |= 0x00000001;
- onChanged();
- return getRegionFieldBuilder().getBuilder();
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder getRegionOrBuilder() {
- if (regionBuilder_ != null) {
- return regionBuilder_.getMessageOrBuilder();
- } else {
- return region_;
- }
- }
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>
- getRegionFieldBuilder() {
- if (regionBuilder_ == null) {
- regionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifierOrBuilder>(
- region_,
- getParentForChildren(),
- isClean());
- region_ = null;
- }
- return regionBuilder_;
- }
-
- // required .Exec call = 2;
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> callBuilder_;
- public boolean hasCall() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getCall() {
- if (callBuilder_ == null) {
- return call_;
- } else {
- return callBuilder_.getMessage();
- }
- }
- public Builder setCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) {
- if (callBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- call_ = value;
- onChanged();
- } else {
- callBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- public Builder setCall(
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder builderForValue) {
- if (callBuilder_ == null) {
- call_ = builderForValue.build();
- onChanged();
- } else {
- callBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- public Builder mergeCall(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) {
- if (callBuilder_ == null) {
- if (((bitField0_ & 0x00000002) == 0x00000002) &&
- call_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) {
- call_ =
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(call_).mergeFrom(value).buildPartial();
- } else {
- call_ = value;
- }
- onChanged();
- } else {
- callBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000002;
- return this;
- }
- public Builder clearCall() {
- if (callBuilder_ == null) {
- call_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
- onChanged();
- } else {
- callBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000002);
- return this;
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder getCallBuilder() {
- bitField0_ |= 0x00000002;
- onChanged();
- return getCallFieldBuilder().getBuilder();
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getCallOrBuilder() {
- if (callBuilder_ != null) {
- return callBuilder_.getMessageOrBuilder();
- } else {
- return call_;
- }
- }
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>
- getCallFieldBuilder() {
- if (callBuilder_ == null) {
- callBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>(
- call_,
- getParentForChildren(),
- isClean());
- call_ = null;
- }
- return callBuilder_;
- }
-
- // @@protoc_insertion_point(builder_scope:ExecCoprocessorRequest)
- }
-
- static {
- defaultInstance = new ExecCoprocessorRequest(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:ExecCoprocessorRequest)
- }
-
- public interface ExecCoprocessorResponseOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required .NameBytesPair value = 1;
- boolean hasValue();
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue();
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder();
- }
- public static final class ExecCoprocessorResponse extends
- com.google.protobuf.GeneratedMessage
- implements ExecCoprocessorResponseOrBuilder {
- // Use ExecCoprocessorResponse.newBuilder() to construct.
- private ExecCoprocessorResponse(Builder builder) {
- super(builder);
- }
- private ExecCoprocessorResponse(boolean noInit) {}
-
- private static final ExecCoprocessorResponse defaultInstance;
- public static ExecCoprocessorResponse getDefaultInstance() {
- return defaultInstance;
- }
-
- public ExecCoprocessorResponse getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable;
- }
-
- private int bitField0_;
- // required .NameBytesPair value = 1;
- public static final int VALUE_FIELD_NUMBER = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_;
- public boolean hasValue() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
- return value_;
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
- return value_;
- }
-
- private void initFields() {
- value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasValue()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!getValue().isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeMessage(1, value_);
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(1, value_);
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)) {
- return super.equals(obj);
- }
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other = (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) obj;
-
- boolean result = true;
- result = result && (hasValue() == other.hasValue());
- if (hasValue()) {
- result = result && getValue()
- .equals(other.getValue());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasValue()) {
- hash = (37 * hash) + VALUE_FIELD_NUMBER;
- hash = (53 * hash) + getValue().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
-      com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponseOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.internal_static_ExecCoprocessorResponse_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- getValueFieldBuilder();
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- if (valueBuilder_ == null) {
- value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
- } else {
- valueBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000001);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDescriptor();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse getDefaultInstanceForType() {
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance();
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse build() {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse buildPartial() {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse result = new org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- if (valueBuilder_ == null) {
- result.value_ = value_;
- } else {
- result.value_ = valueBuilder_.build();
- }
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) {
- return mergeFrom((org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse other) {
- if (other == org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()) return this;
- if (other.hasValue()) {
- mergeValue(other.getValue());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasValue()) {
-
- return false;
- }
- if (!getValue().isInitialized()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder();
- if (hasValue()) {
- subBuilder.mergeFrom(getValue());
- }
- input.readMessage(subBuilder, extensionRegistry);
- setValue(subBuilder.buildPartial());
- break;
- }
- }
- }
- }
-
- private int bitField0_;
-
- // required .NameBytesPair value = 1;
- private org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder> valueBuilder_;
- public boolean hasValue() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair getValue() {
- if (valueBuilder_ == null) {
- return value_;
- } else {
- return valueBuilder_.getMessage();
- }
- }
- public Builder setValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
- if (valueBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- value_ = value;
- onChanged();
- } else {
- valueBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- public Builder setValue(
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder builderForValue) {
- if (valueBuilder_ == null) {
- value_ = builderForValue.build();
- onChanged();
- } else {
- valueBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- public Builder mergeValue(org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair value) {
- if (valueBuilder_ == null) {
- if (((bitField0_ & 0x00000001) == 0x00000001) &&
- value_ != org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance()) {
- value_ =
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.newBuilder(value_).mergeFrom(value).buildPartial();
- } else {
- value_ = value;
- }
- onChanged();
- } else {
- valueBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000001;
- return this;
- }
- public Builder clearValue() {
- if (valueBuilder_ == null) {
- value_ = org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.getDefaultInstance();
- onChanged();
- } else {
- valueBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000001);
- return this;
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder getValueBuilder() {
- bitField0_ |= 0x00000001;
- onChanged();
- return getValueFieldBuilder().getBuilder();
- }
- public org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder getValueOrBuilder() {
- if (valueBuilder_ != null) {
- return valueBuilder_.getMessageOrBuilder();
- } else {
- return value_;
- }
- }
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>
- getValueFieldBuilder() {
- if (valueBuilder_ == null) {
- valueBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair.Builder, org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPairOrBuilder>(
- value_,
- getParentForChildren(),
- isClean());
- value_ = null;
- }
- return valueBuilder_;
- }
-
- // @@protoc_insertion_point(builder_scope:ExecCoprocessorResponse)
- }
-
- static {
- defaultInstance = new ExecCoprocessorResponse(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:ExecCoprocessorResponse)
- }
-
public interface CoprocessorServiceCallOrBuilder
extends com.google.protobuf.MessageOrBuilder {
@@ -20209,11 +17909,6 @@ public final class ClientProtos {
boolean hasGet();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get getGet();
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetOrBuilder getGetOrBuilder();
-
- // optional .Exec exec = 3;
- boolean hasExec();
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec();
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder();
}
public static final class MultiAction extends
com.google.protobuf.GeneratedMessage
@@ -20270,23 +17965,9 @@ public final class ClientProtos {
return get_;
}
- // optional .Exec exec = 3;
- public static final int EXEC_FIELD_NUMBER = 3;
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec exec_;
- public boolean hasExec() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec() {
- return exec_;
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder() {
- return exec_;
- }
-
private void initFields() {
mutate_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.getDefaultInstance();
get_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Get.getDefaultInstance();
- exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
@@ -20305,12 +17986,6 @@ public final class ClientProtos {
return false;
}
}
- if (hasExec()) {
- if (!getExec().isInitialized()) {
- memoizedIsInitialized = 0;
- return false;
- }
- }
memoizedIsInitialized = 1;
return true;
}
@@ -20324,9 +17999,6 @@ public final class ClientProtos {
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeMessage(2, get_);
}
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeMessage(3, exec_);
- }
getUnknownFields().writeTo(output);
}
@@ -20344,10 +18016,6 @@ public final class ClientProtos {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, get_);
}
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(3, exec_);
- }
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
@@ -20381,11 +18049,6 @@ public final class ClientProtos {
result = result && getGet()
.equals(other.getGet());
}
- result = result && (hasExec() == other.hasExec());
- if (hasExec()) {
- result = result && getExec()
- .equals(other.getExec());
- }
result = result &&
getUnknownFields().equals(other.getUnknownFields());
return result;
@@ -20403,10 +18066,6 @@ public final class ClientProtos {
hash = (37 * hash) + GET_FIELD_NUMBER;
hash = (53 * hash) + getGet().hashCode();
}
- if (hasExec()) {
- hash = (37 * hash) + EXEC_FIELD_NUMBER;
- hash = (53 * hash) + getExec().hashCode();
- }
hash = (29 * hash) + getUnknownFields().hashCode();
return hash;
}
@@ -20517,7 +18176,6 @@ public final class ClientProtos {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
getMutateFieldBuilder();
getGetFieldBuilder();
- getExecFieldBuilder();
}
}
private static Builder create() {
@@ -20538,12 +18196,6 @@ public final class ClientProtos {
getBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
- if (execBuilder_ == null) {
- exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
- } else {
- execBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
@@ -20598,14 +18250,6 @@ public final class ClientProtos {
} else {
result.get_ = getBuilder_.build();
}
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
- to_bitField0_ |= 0x00000004;
- }
- if (execBuilder_ == null) {
- result.exec_ = exec_;
- } else {
- result.exec_ = execBuilder_.build();
- }
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
@@ -20628,9 +18272,6 @@ public final class ClientProtos {
if (other.hasGet()) {
mergeGet(other.getGet());
}
- if (other.hasExec()) {
- mergeExec(other.getExec());
- }
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
@@ -20648,12 +18289,6 @@ public final class ClientProtos {
return false;
}
}
- if (hasExec()) {
- if (!getExec().isInitialized()) {
-
- return false;
- }
- }
return true;
}
@@ -20698,15 +18333,6 @@ public final class ClientProtos {
setGet(subBuilder.buildPartial());
break;
}
- case 26: {
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder subBuilder = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder();
- if (hasExec()) {
- subBuilder.mergeFrom(getExec());
- }
- input.readMessage(subBuilder, extensionRegistry);
- setExec(subBuilder.buildPartial());
- break;
- }
}
}
}
@@ -20893,96 +18519,6 @@ public final class ClientProtos {
return getBuilder_;
}
- // optional .Exec exec = 3;
- private org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder> execBuilder_;
- public boolean hasExec() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec getExec() {
- if (execBuilder_ == null) {
- return exec_;
- } else {
- return execBuilder_.getMessage();
- }
- }
- public Builder setExec(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) {
- if (execBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- exec_ = value;
- onChanged();
- } else {
- execBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000004;
- return this;
- }
- public Builder setExec(
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder builderForValue) {
- if (execBuilder_ == null) {
- exec_ = builderForValue.build();
- onChanged();
- } else {
- execBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000004;
- return this;
- }
- public Builder mergeExec(org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec value) {
- if (execBuilder_ == null) {
- if (((bitField0_ & 0x00000004) == 0x00000004) &&
- exec_ != org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance()) {
- exec_ =
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.newBuilder(exec_).mergeFrom(value).buildPartial();
- } else {
- exec_ = value;
- }
- onChanged();
- } else {
- execBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000004;
- return this;
- }
- public Builder clearExec() {
- if (execBuilder_ == null) {
- exec_ = org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.getDefaultInstance();
- onChanged();
- } else {
- execBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000004);
- return this;
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder getExecBuilder() {
- bitField0_ |= 0x00000004;
- onChanged();
- return getExecFieldBuilder().getBuilder();
- }
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder getExecOrBuilder() {
- if (execBuilder_ != null) {
- return execBuilder_.getMessageOrBuilder();
- } else {
- return exec_;
- }
- }
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>
- getExecFieldBuilder() {
- if (execBuilder_ == null) {
- execBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder, org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecOrBuilder>(
- exec_,
- getParentForChildren(),
- isClean());
- exec_ = null;
- }
- return execBuilder_;
- }
-
// @@protoc_insertion_point(builder_scope:MultiAction)
}
@@ -23085,11 +20621,6 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
- public abstract void execCoprocessor(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse> done);
-
public abstract void execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
@@ -23153,14 +20684,6 @@ public final class ClientProtos {
impl.bulkLoadHFile(controller, request, done);
}
- @java.lang.Override
- public void execCoprocessor(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse> done) {
- impl.execCoprocessor(controller, request, done);
- }
-
@java.lang.Override
public void execService(
com.google.protobuf.RpcController controller,
@@ -23212,10 +20735,8 @@ public final class ClientProtos {
case 5:
return impl.bulkLoadHFile(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest)request);
case 6:
- return impl.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request);
- case 7:
return impl.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request);
- case 8:
+ case 7:
return impl.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request);
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -23244,10 +20765,8 @@ public final class ClientProtos {
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
case 6:
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance();
- case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
- case 8:
+ case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -23276,10 +20795,8 @@ public final class ClientProtos {
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
case 6:
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance();
- case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
- case 8:
+ case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -23319,11 +20836,6 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse> done);
- public abstract void execCoprocessor(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse> done);
-
public abstract void execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
@@ -23387,16 +20899,11 @@ public final class ClientProtos {
done));
return;
case 6:
- this.execCoprocessor(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest)request,
- com.google.protobuf.RpcUtil.specializeCallback(
- done));
- return;
- case 7:
this.execService(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
return;
- case 8:
+ case 7:
this.multi(controller, (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest)request,
com.google.protobuf.RpcUtil.specializeCallback(
done));
@@ -23428,10 +20935,8 @@ public final class ClientProtos {
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest.getDefaultInstance();
case 6:
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.getDefaultInstance();
- case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.getDefaultInstance();
- case 8:
+ case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -23460,10 +20965,8 @@ public final class ClientProtos {
case 5:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance();
case 6:
- return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance();
- case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance();
- case 8:
+ case 7:
return org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance();
default:
throw new java.lang.AssertionError("Can't get here.");
@@ -23576,27 +21079,12 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.getDefaultInstance()));
}
- public void execCoprocessor(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(6),
- controller,
- request,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance()));
- }
-
public void execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse> done) {
channel.callMethod(
- getDescriptor().getMethods().get(7),
+ getDescriptor().getMethods().get(6),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance(),
@@ -23611,7 +21099,7 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request,
          com.google.protobuf.RpcCallback<org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse> done) {
channel.callMethod(
- getDescriptor().getMethods().get(8),
+ getDescriptor().getMethods().get(7),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance(),
@@ -23658,11 +21146,6 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileRequest request)
throws com.google.protobuf.ServiceException;
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request)
- throws com.google.protobuf.ServiceException;
-
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
@@ -23753,24 +21236,12 @@ public final class ClientProtos {
}
- public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse execCoprocessor(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest request)
- throws com.google.protobuf.ServiceException {
- return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse) channel.callBlockingMethod(
- getDescriptor().getMethods().get(6),
- controller,
- request,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.getDefaultInstance());
- }
-
-
public org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse execService(
com.google.protobuf.RpcController controller,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse) channel.callBlockingMethod(
- getDescriptor().getMethods().get(7),
+ getDescriptor().getMethods().get(6),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.getDefaultInstance());
@@ -23782,7 +21253,7 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest request)
throws com.google.protobuf.ServiceException {
return (org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse) channel.callBlockingMethod(
- getDescriptor().getMethods().get(8),
+ getDescriptor().getMethods().get(7),
controller,
request,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiResponse.getDefaultInstance());
@@ -23896,21 +21367,6 @@ public final class ClientProtos {
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_BulkLoadHFileResponse_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_Exec_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_Exec_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_ExecCoprocessorRequest_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_ExecCoprocessorRequest_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_ExecCoprocessorResponse_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_ExecCoprocessorResponse_fieldAccessorTable;
private static com.google.protobuf.Descriptors.Descriptor
internal_static_CoprocessorServiceCall_descriptor;
private static
@@ -24014,43 +21470,34 @@ public final class ClientProtos {
"amilyPath\030\002 \003(\0132 .BulkLoadHFileRequest.F" +
"amilyPath\022\024\n\014assignSeqNum\030\003 \001(\010\032*\n\nFamil" +
"yPath\022\016\n\006family\030\001 \002(\014\022\014\n\004path\030\002 \002(\t\"\'\n\025B" +
- "ulkLoadHFileResponse\022\016\n\006loaded\030\001 \002(\010\"\203\001\n",
- "\004Exec\022\013\n\003row\030\001 \002(\014\022\024\n\014protocolName\030\002 \002(\t" +
- "\022\022\n\nmethodName\030\003 \002(\t\022!\n\010property\030\004 \003(\0132\017" +
- ".NameStringPair\022!\n\tparameter\030\005 \003(\0132\016.Nam" +
- "eBytesPair\"O\n\026ExecCoprocessorRequest\022 \n\006" +
- "region\030\001 \002(\0132\020.RegionSpecifier\022\023\n\004call\030\002" +
- " \002(\0132\005.Exec\"8\n\027ExecCoprocessorResponse\022\035" +
- "\n\005value\030\001 \002(\0132\016.NameBytesPair\"_\n\026Coproce" +
- "ssorServiceCall\022\013\n\003row\030\001 \002(\014\022\023\n\013serviceN" +
- "ame\030\002 \002(\t\022\022\n\nmethodName\030\003 \002(\t\022\017\n\007request" +
- "\030\004 \002(\014\"d\n\031CoprocessorServiceRequest\022 \n\006r",
- "egion\030\001 \002(\0132\020.RegionSpecifier\022%\n\004call\030\002 " +
- "\002(\0132\027.CoprocessorServiceCall\"]\n\032Coproces" +
- "sorServiceResponse\022 \n\006region\030\001 \002(\0132\020.Reg" +
- "ionSpecifier\022\035\n\005value\030\002 \002(\0132\016.NameBytesP" +
- "air\"N\n\013MultiAction\022\027\n\006mutate\030\001 \001(\0132\007.Mut" +
- "ate\022\021\n\003get\030\002 \001(\0132\004.Get\022\023\n\004exec\030\003 \001(\0132\005.E" +
- "xec\"P\n\014ActionResult\022\035\n\005value\030\001 \001(\0132\016.Nam" +
- "eBytesPair\022!\n\texception\030\002 \001(\0132\016.NameByte" +
- "sPair\"^\n\014MultiRequest\022 \n\006region\030\001 \002(\0132\020." +
- "RegionSpecifier\022\034\n\006action\030\002 \003(\0132\014.MultiA",
- "ction\022\016\n\006atomic\030\003 \001(\010\".\n\rMultiResponse\022\035" +
- "\n\006result\030\001 \003(\0132\r.ActionResult2\331\003\n\rClient" +
- "Service\022 \n\003get\022\013.GetRequest\032\014.GetRespons" +
- "e\022)\n\006mutate\022\016.MutateRequest\032\017.MutateResp" +
- "onse\022#\n\004scan\022\014.ScanRequest\032\r.ScanRespons" +
- "e\022,\n\007lockRow\022\017.LockRowRequest\032\020.LockRowR" +
- "esponse\0222\n\tunlockRow\022\021.UnlockRowRequest\032" +
- "\022.UnlockRowResponse\022>\n\rbulkLoadHFile\022\025.B" +
- "ulkLoadHFileRequest\032\026.BulkLoadHFileRespo" +
- "nse\022D\n\017execCoprocessor\022\027.ExecCoprocessor",
- "Request\032\030.ExecCoprocessorResponse\022F\n\013exe" +
- "cService\022\032.CoprocessorServiceRequest\032\033.C" +
- "oprocessorServiceResponse\022&\n\005multi\022\r.Mul" +
- "tiRequest\032\016.MultiResponseBB\n*org.apache." +
- "hadoop.hbase.protobuf.generatedB\014ClientP" +
- "rotosH\001\210\001\001\240\001\001"
+ "ulkLoadHFileResponse\022\016\n\006loaded\030\001 \002(\010\"_\n\026",
+ "CoprocessorServiceCall\022\013\n\003row\030\001 \002(\014\022\023\n\013s" +
+ "erviceName\030\002 \002(\t\022\022\n\nmethodName\030\003 \002(\t\022\017\n\007" +
+ "request\030\004 \002(\014\"d\n\031CoprocessorServiceReque" +
+ "st\022 \n\006region\030\001 \002(\0132\020.RegionSpecifier\022%\n\004" +
+ "call\030\002 \002(\0132\027.CoprocessorServiceCall\"]\n\032C" +
+ "oprocessorServiceResponse\022 \n\006region\030\001 \002(" +
+ "\0132\020.RegionSpecifier\022\035\n\005value\030\002 \002(\0132\016.Nam" +
+ "eBytesPair\"9\n\013MultiAction\022\027\n\006mutate\030\001 \001(" +
+ "\0132\007.Mutate\022\021\n\003get\030\002 \001(\0132\004.Get\"P\n\014ActionR" +
+ "esult\022\035\n\005value\030\001 \001(\0132\016.NameBytesPair\022!\n\t",
+ "exception\030\002 \001(\0132\016.NameBytesPair\"^\n\014Multi" +
+ "Request\022 \n\006region\030\001 \002(\0132\020.RegionSpecifie" +
+ "r\022\034\n\006action\030\002 \003(\0132\014.MultiAction\022\016\n\006atomi" +
+ "c\030\003 \001(\010\".\n\rMultiResponse\022\035\n\006result\030\001 \003(\013" +
+ "2\r.ActionResult2\223\003\n\rClientService\022 \n\003get" +
+ "\022\013.GetRequest\032\014.GetResponse\022)\n\006mutate\022\016." +
+ "MutateRequest\032\017.MutateResponse\022#\n\004scan\022\014" +
+ ".ScanRequest\032\r.ScanResponse\022,\n\007lockRow\022\017" +
+ ".LockRowRequest\032\020.LockRowResponse\0222\n\tunl" +
+ "ockRow\022\021.UnlockRowRequest\032\022.UnlockRowRes",
+ "ponse\022>\n\rbulkLoadHFile\022\025.BulkLoadHFileRe" +
+ "quest\032\026.BulkLoadHFileResponse\022F\n\013execSer" +
+ "vice\022\032.CoprocessorServiceRequest\032\033.Copro" +
+ "cessorServiceResponse\022&\n\005multi\022\r.MultiRe" +
+ "quest\032\016.MultiResponseBB\n*org.apache.hado" +
+ "op.hbase.protobuf.generatedB\014ClientProto" +
+ "sH\001\210\001\001\240\001\001"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
@@ -24225,32 +21672,8 @@ public final class ClientProtos {
new java.lang.String[] { "Loaded", },
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.BulkLoadHFileResponse.Builder.class);
- internal_static_Exec_descriptor =
- getDescriptor().getMessageTypes().get(18);
- internal_static_Exec_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_Exec_descriptor,
- new java.lang.String[] { "Row", "ProtocolName", "MethodName", "Property", "Parameter", },
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.class,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Exec.Builder.class);
- internal_static_ExecCoprocessorRequest_descriptor =
- getDescriptor().getMessageTypes().get(19);
- internal_static_ExecCoprocessorRequest_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_ExecCoprocessorRequest_descriptor,
- new java.lang.String[] { "Region", "Call", },
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.class,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest.Builder.class);
- internal_static_ExecCoprocessorResponse_descriptor =
- getDescriptor().getMessageTypes().get(20);
- internal_static_ExecCoprocessorResponse_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_ExecCoprocessorResponse_descriptor,
- new java.lang.String[] { "Value", },
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.class,
- org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse.Builder.class);
internal_static_CoprocessorServiceCall_descriptor =
- getDescriptor().getMessageTypes().get(21);
+ getDescriptor().getMessageTypes().get(18);
internal_static_CoprocessorServiceCall_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CoprocessorServiceCall_descriptor,
@@ -24258,7 +21681,7 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall.Builder.class);
internal_static_CoprocessorServiceRequest_descriptor =
- getDescriptor().getMessageTypes().get(22);
+ getDescriptor().getMessageTypes().get(19);
internal_static_CoprocessorServiceRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CoprocessorServiceRequest_descriptor,
@@ -24266,7 +21689,7 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest.Builder.class);
internal_static_CoprocessorServiceResponse_descriptor =
- getDescriptor().getMessageTypes().get(23);
+ getDescriptor().getMessageTypes().get(20);
internal_static_CoprocessorServiceResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_CoprocessorServiceResponse_descriptor,
@@ -24274,15 +21697,15 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse.Builder.class);
internal_static_MultiAction_descriptor =
- getDescriptor().getMessageTypes().get(24);
+ getDescriptor().getMessageTypes().get(21);
internal_static_MultiAction_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiAction_descriptor,
- new java.lang.String[] { "Mutate", "Get", "Exec", },
+ new java.lang.String[] { "Mutate", "Get", },
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiAction.Builder.class);
internal_static_ActionResult_descriptor =
- getDescriptor().getMessageTypes().get(25);
+ getDescriptor().getMessageTypes().get(22);
internal_static_ActionResult_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_ActionResult_descriptor,
@@ -24290,7 +21713,7 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ActionResult.Builder.class);
internal_static_MultiRequest_descriptor =
- getDescriptor().getMessageTypes().get(26);
+ getDescriptor().getMessageTypes().get(23);
internal_static_MultiRequest_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiRequest_descriptor,
@@ -24298,7 +21721,7 @@ public final class ClientProtos {
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.class,
org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest.Builder.class);
internal_static_MultiResponse_descriptor =
- getDescriptor().getMessageTypes().get(27);
+ getDescriptor().getMessageTypes().get(24);
internal_static_MultiResponse_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_MultiResponse_descriptor,
diff --git a/hbase-protocol/src/main/protobuf/Client.proto b/hbase-protocol/src/main/protobuf/Client.proto
index de1aff56312..7a3e60e0165 100644
--- a/hbase-protocol/src/main/protobuf/Client.proto
+++ b/hbase-protocol/src/main/protobuf/Client.proto
@@ -256,45 +256,6 @@ message BulkLoadHFileResponse {
required bool loaded = 1;
}
-/**
- * An individual coprocessor call. You must specify the protocol,
- * the method, and the row to which the call will be executed.
- *
- * You can specify the configuration settings in the property list.
- *
- * The parameter list has the parameters used for the method.
- * A parameter is a pair of parameter name and the binary parameter
- * value. The name is the parameter class name. The value is the
- * binary format of the parameter, for example, protocol buffer
- * encoded value.
- */
-message Exec {
- required bytes row = 1;
- required string protocolName = 2;
- required string methodName = 3;
- repeated NameStringPair property = 4;
- repeated NameBytesPair parameter = 5;
-}
-
- /**
- * Executes a single {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol}
- * method using the registered protocol handlers.
- * {@link CoprocessorProtocol} implementations must be registered via the
- * {@link org.apache.hadoop.hbase.regionserver.HRegion#registerProtocol(
- * Class, org.apache.hadoop.hbase.ipc.CoprocessorProtocol)}
- * method before they are available.
- * @deprecated Use CoprocessorService going forward
- */
-message ExecCoprocessorRequest {
- required RegionSpecifier region = 1;
- required Exec call = 2;
-}
-
-// @deprecated Use CoprocessorService going forward
-message ExecCoprocessorResponse {
- required NameBytesPair value = 1;
-}
-
message CoprocessorServiceCall {
required bytes row = 1;
required string serviceName = 2;
@@ -319,7 +280,6 @@ message CoprocessorServiceResponse {
message MultiAction {
optional Mutate mutate = 1;
optional Get get = 2;
- optional Exec exec = 3;
}
/**
@@ -372,9 +332,6 @@ service ClientService {
rpc bulkLoadHFile(BulkLoadHFileRequest)
returns(BulkLoadHFileResponse);
- rpc execCoprocessor(ExecCoprocessorRequest)
- returns(ExecCoprocessorResponse);
-
rpc execService(CoprocessorServiceRequest)
returns(CoprocessorServiceResponse);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnection.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnection.java
index c750e8160a8..9456b9c93e3 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnection.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnection.java
@@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.catalog.CatalogTracker;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
/**
@@ -299,37 +298,6 @@ public interface HConnection extends Abortable, Closeable {
Object[] results,
     Batch.Callback<R> callback) throws IOException, InterruptedException;
-
- /**
- * Executes the given
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call}
- * callable for each row in the given list and invokes
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)}
- * for each result returned.
- *
- * @param protocol the protocol interface being called
- * @param rows a list of row keys for which the callable should be invoked
- * @param tableName table name for the coprocessor invoked
- * @param pool ExecutorService used to submit the calls per row
- * @param call instance on which to invoke
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)}
- * for each row
- * @param callback instance on which to invoke
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)}
- * for each result
-   * @param <T> the protocol interface type
-   * @param <R> the callable's return type
- * @throws IOException
- * @deprecated CoprocessorProtocol replaced by CoprocessorService calls.
- */
-  public <T extends CoprocessorProtocol,R> void processExecs(
-      final Class<T> protocol,
-      List<byte[]> rows,
-      final byte[] tableName,
-      ExecutorService pool,
-      final Batch.Call<T,R> call,
-      final Batch.Callback<R> callback) throws IOException, Throwable;
-
/**
* Enable or disable region cache prefetch for the table. It will be
* applied for the given table's all HTable instances within this
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
index 447a64ec5be..ecb487e922f 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HConnectionManager.java
@@ -37,7 +37,6 @@ import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
-import java.util.TreeMap;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.CopyOnWriteArraySet;
@@ -73,8 +72,6 @@ import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitor;
import org.apache.hadoop.hbase.client.MetaScanner.MetaScannerVisitorBase;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
-import org.apache.hadoop.hbase.ipc.ExecRPCInvoker;
import org.apache.hadoop.hbase.ipc.HBaseRPC;
import org.apache.hadoop.hbase.ipc.VersionedProtocol;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
@@ -2124,75 +2121,6 @@ public class HConnectionManager {
}
}
-
- /**
- * Executes the given
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call}
- * callable for each row in the
- * given list and invokes
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)}
- * for each result returned.
- *
- * @param protocol the protocol interface being called
- * @param rows a list of row keys for which the callable should be invoked
- * @param tableName table name for the coprocessor invoked
- * @param pool ExecutorService used to submit the calls per row
- * @param callable instance on which to invoke
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)}
- * for each row
- * @param callback instance on which to invoke
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)}
- * for each result
-     * @param <T> the protocol interface type
-     * @param <R> the callable's return type
- * @throws IOException
- */
- @Deprecated
-    public <T extends CoprocessorProtocol,R> void processExecs(
-        final Class<T> protocol,
-        List<byte[]> rows,
-        final byte[] tableName,
-        ExecutorService pool,
-        final Batch.Call<T,R> callable,
-        final Batch.Callback<R> callback)
- throws IOException, Throwable {
-
-      Map<byte[],Future<R>> futures =
-          new TreeMap<byte[],Future<R>>(Bytes.BYTES_COMPARATOR);
- for (final byte[] r : rows) {
- final ExecRPCInvoker invoker =
- new ExecRPCInvoker(conf, this, protocol, tableName, r);
-        Future<R> future = pool.submit(
-            new Callable<R>() {
- public R call() throws Exception {
- T instance = (T)Proxy.newProxyInstance(conf.getClassLoader(),
- new Class[]{protocol},
- invoker);
- R result = callable.call(instance);
- byte[] region = invoker.getRegionName();
- if (callback != null) {
- callback.update(region, r, result);
- }
- return result;
- }
- });
- futures.put(r, future);
- }
-      for (Map.Entry<byte[],Future<R>> e : futures.entrySet()) {
- try {
- e.getValue().get();
- } catch (ExecutionException ee) {
- LOG.warn("Error executing for row "+Bytes.toStringBinary(e.getKey()), ee);
- throw ee.getCause();
- } catch (InterruptedException ie) {
- Thread.currentThread().interrupt();
- throw new IOException("Interrupted executing for row " +
- Bytes.toStringBinary(e.getKey()), ie);
- }
- }
- }
-
-
/*
* Return the number of cached region for a table. It will only be called
* from a unit test.
@@ -2210,8 +2138,6 @@ public class HConnectionManager {
}
}
-
-
/**
* Check the region cache to see whether a region is cached yet or not.
* Called by unit tests.
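The fan-out pattern in the deleted implementation is ordinary java.util.concurrent plumbing. For callers that still need explicit per-row parallelism, the same idiom as a self-contained sketch (rows, pool, and the invokeForRow helper are assumed to be supplied by the caller):

    Map<byte[], Future<Long>> futures =
        new TreeMap<byte[], Future<Long>>(Bytes.BYTES_COMPARATOR);
    for (final byte[] row : rows) {
      futures.put(row, pool.submit(new Callable<Long>() {
        public Long call() throws Exception {
          return invokeForRow(row);  // hypothetical per-row call
        }
      }));
    }
    // Drain in row order, unwrapping ExecutionException to rethrow the real cause.
    for (Map.Entry<byte[], Future<Long>> e : futures.entrySet()) {
      try {
        e.getValue().get();
      } catch (ExecutionException ee) {
        throw ee.getCause();
      } catch (InterruptedException ie) {
        Thread.currentThread().interrupt();
        throw new IOException("Interrupted at row " +
            Bytes.toStringBinary(e.getKey()), ie);
      }
    }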
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTable.java
index 9d31e4c1f30..17d7e159519 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTable.java
@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.client;
import java.io.Closeable;
import java.io.IOException;
import java.io.InterruptedIOException;
-import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
@@ -52,9 +51,7 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.client.HConnectionManager.HConnectable;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.filter.BinaryComparator;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
-import org.apache.hadoop.hbase.ipc.ExecRPCInvoker;
import org.apache.hadoop.hbase.ipc.RegionCoprocessorRpcChannel;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.protobuf.RequestConverter;
@@ -1191,22 +1188,6 @@ public class HTable implements HTableInterface {
this.connection.clearRegionCache();
}
- /**
- * {@inheritDoc}
- */
- @Override
- @Deprecated
- public <T extends CoprocessorProtocol> T coprocessorProxy(
- Class<T> protocol, byte[] row) {
- return (T)Proxy.newProxyInstance(this.getClass().getClassLoader(),
- new Class[]{protocol},
- new ExecRPCInvoker(configuration,
- connection,
- protocol,
- tableName,
- row));
- }
-
/**
* {@inheritDoc}
*/
@@ -1214,43 +1195,6 @@ public class HTable implements HTableInterface {
return new RegionCoprocessorRpcChannel(connection, tableName, row);
}
- /**
- * {@inheritDoc}
- */
- @Override
- @Deprecated
- public <T extends CoprocessorProtocol, R> Map<byte[],R> coprocessorExec(
- Class<T> protocol, byte[] startKey, byte[] endKey,
- Batch.Call<T,R> callable)
- throws IOException, Throwable {
-
- final Map<byte[],R> results = Collections.synchronizedMap(new TreeMap<byte[],R>(
- Bytes.BYTES_COMPARATOR));
- coprocessorExec(protocol, startKey, endKey, callable,
- new Batch.Callback<R>(){
- public void update(byte[] region, byte[] row, R value) {
- results.put(region, value);
- }
- });
- return results;
- }
-
- /**
- * {@inheritDoc}
- */
- @Override
- @Deprecated
- public <T extends CoprocessorProtocol, R> void coprocessorExec(
- Class<T> protocol, byte[] startKey, byte[] endKey,
- Batch.Call<T,R> callable, Batch.Callback<R> callback)
- throws IOException, Throwable {
-
- // get regions covered by the row range
- List<byte[]> keys = getStartKeysInRange(startKey, endKey);
- connection.processExecs(protocol, keys, tableName, pool, callable,
- callback);
- }
-
/**
* {@inheritDoc}
*/
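The single-row proxy that coprocessorProxy() produced maps onto the channel API kept above: obtain a CoprocessorRpcChannel for the row, then build a generated blocking stub over it. A sketch with a hypothetical generated service MyService and its request/response messages:

    CoprocessorRpcChannel channel = table.coprocessorService(row);
    MyService.BlockingInterface service = MyService.newBlockingStub(channel);
    MyRequest request = MyRequest.newBuilder().build();
    MyResponse response = service.myCall(null, request);  // null RpcController

One RPC is still issued per stub call, as with the old dynamic proxy, but the payload is a protobuf message rather than a Writable-serialized Invocation.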
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
index ec985d90451..c5fc356546c 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java
@@ -31,7 +31,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
/**
@@ -415,95 +414,6 @@ public interface HTableInterface extends Closeable {
*/
void unlockRow(RowLock rl) throws IOException;
- /**
- * Creates and returns a proxy to the CoprocessorProtocol instance running in the
- * region containing the specified row. The row given does not actually have
- * to exist. Whichever region would contain the row based on start and end keys will
- * be used. Note that the {@code row} parameter is also not passed to the
- * coprocessor handler registered for this protocol, unless the {@code row}
- * is separately passed as an argument in a proxy method call. The parameter
- * here is just used to locate the region used to handle the call.
- *
- * @param protocol The class or interface defining the remote protocol
- * @param row The row key used to identify the remote region location
- * @return A CoprocessorProtocol instance
- * @deprecated since 0.96. Use {@link HTableInterface#coprocessorService(byte[])} instead.
- */
- @Deprecated
- <T extends CoprocessorProtocol> T coprocessorProxy(Class<T> protocol, byte[] row);
-
- /**
- * Invoke the passed
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call} against
- * the {@link CoprocessorProtocol} instances running in the selected regions.
- * All regions beginning with the region containing the <code>startKey</code>
- * row, through to the region containing the <code>endKey</code> row (inclusive)
- * will be used. If <code>startKey</code> or <code>endKey</code> is
- * <code>null</code>, the first and last regions in the table, respectively,
- * will be used in the range selection.
- *
- * @param protocol the CoprocessorProtocol implementation to call
- * @param startKey start region selection with region containing this row
- * @param endKey select regions up to and including the region containing
- * this row
- * @param callable wraps the CoprocessorProtocol implementation method calls
- * made per-region
- * @param <T> CoprocessorProtocol subclass for the remote invocation
- * @param <R> Return type for the
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)}
- * method
- * @return a Map of region names to
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} return values
- *
- * @deprecated since 0.96. Use
- * {@link HTableInterface#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)} instead.
- */
- @Deprecated
- <T extends CoprocessorProtocol, R> Map<byte[],R> coprocessorExec(
- Class<T> protocol, byte[] startKey, byte[] endKey, Batch.Call<T,R> callable)
- throws IOException, Throwable;
-
- /**
- * Invoke the passed
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call} against
- * the {@link CoprocessorProtocol} instances running in the selected regions.
- * All regions beginning with the region containing the <code>startKey</code>
- * row, through to the region containing the <code>endKey</code> row
- * (inclusive)
- * will be used. If <code>startKey</code> or <code>endKey</code> is
- * <code>null</code>, the first and last regions in the table, respectively,
- * will be used in the range selection.
- *
- * <p>
- * For each result, the given
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)}
- * method will be called.
- * </p>
- *
- * @param protocol the CoprocessorProtocol implementation to call
- * @param startKey start region selection with region containing this row
- * @param endKey select regions up to and including the region containing
- * this row
- * @param callable wraps the CoprocessorProtocol implementation method calls
- * made per-region
- * @param callback an instance whose
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)}
- * method will be invoked with the
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)}
- * return value for each region
- * @param <T> CoprocessorProtocol subclass for the remote invocation
- * @param <R> Return type for the
- * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)}
- * method
- *
- * @deprecated since 0.96.
- * Use {@link HTableInterface#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)} instead.
- */
- @Deprecated
- <T extends CoprocessorProtocol, R> void coprocessorExec(
- Class<T> protocol, byte[] startKey, byte[] endKey,
- Batch.Call<T,R> callable, Batch.Callback<R> callback)
- throws IOException, Throwable;
-
/**
* Creates and returns a {@link com.google.protobuf.RpcChannel} instance connected to the
* table region containing the specified row. The row given does not actually have
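The callback overload removed above carries over nearly one-for-one to coprocessorService(Class, byte[], byte[], Batch.Call, Batch.Callback). A hedged sketch, reusing the placeholder RowCountService from earlier plus a hypothetical countRows() helper for the RPC itself:

    final AtomicLong total = new AtomicLong();
    table.coprocessorService(
        RowCountService.class, null, null,
        new Batch.Call<RowCountService, Long>() {
          public Long call(RowCountService counter) throws IOException {
            return countRows(counter);  // issues the RPC as sketched earlier
          }
        },
        new Batch.Callback<Long>() {
          public void update(byte[] region, byte[] row, Long value) {
            total.addAndGet(value);  // invoked once per region as results arrive
          }
        });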
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
index af9d49adbb6..ef9516fbe61 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTablePool.java
@@ -33,7 +33,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.PoolMap;
@@ -474,30 +473,6 @@ public class HTablePool implements Closeable {
table.unlockRow(rl);
}
- @Override
- @Deprecated
- public <T extends CoprocessorProtocol> T coprocessorProxy(
- Class<T> protocol, byte[] row) {
- return table.coprocessorProxy(protocol, row);
- }
-
- @Override
- @Deprecated
- public <T extends CoprocessorProtocol, R> Map<byte[],R> coprocessorExec(
- Class<T> protocol, byte[] startKey, byte[] endKey,
- Batch.Call<T,R> callable) throws IOException, Throwable {
- return table.coprocessorExec(protocol, startKey, endKey, callable);
- }
-
- @Override
- @Deprecated
- public <T extends CoprocessorProtocol, R> void coprocessorExec(
- Class<T> protocol, byte[] startKey, byte[] endKey,
- Batch.Call<T,R> callable, Batch.Callback<R> callback)
- throws IOException, Throwable {
- table.coprocessorExec(protocol, startKey, endKey, callable, callback);
- }
-
@Override
public CoprocessorRpcChannel coprocessorService(byte[] row) {
return table.coprocessorService(row);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
index ed150cfe532..9ca4de6be2b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Batch.java
@@ -19,18 +19,10 @@
package org.apache.hadoop.hbase.client.coprocessor;
-import org.apache.commons.lang.reflect.MethodUtils;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import java.io.IOException;
+
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
-
-import java.io.IOException;
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.InvocationTargetException;
-import java.lang.reflect.Method;
-import java.lang.reflect.Proxy;
/**
@@ -40,95 +32,6 @@ import java.lang.reflect.Proxy;
@InterfaceAudience.Public
@InterfaceStability.Evolving
public abstract class Batch {
- private static Log LOG = LogFactory.getLog(Batch.class);
-
- /**
- * Creates a new {@link Batch.Call} instance that invokes a method
- * with the given parameters and returns the result.
- *
- * <p>
- * Note that currently the method is naively looked up using the method name
- * and class types of the passed arguments, which means that
- * none of the arguments can be null.
- * For more flexibility, see
- * {@link Batch#forMethod(java.lang.reflect.Method, Object...)}.
- * </p>
- *
- * @param protocol the protocol class being called
- * @param method the method name
- * @param args zero or more arguments to be passed to the method
- * (individual args cannot be null!)
- * @param <T> the class type of the protocol implementation being invoked
- * @param <R> the return type for the method call
- * @return a {@code Callable} instance that will invoke the given method
- * and return the results
- * @throws NoSuchMethodException if the method named, with the given argument
- * types, cannot be found in the protocol class
- * @see Batch#forMethod(java.lang.reflect.Method, Object...)
- * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
- */
- @Deprecated
- public static <T extends CoprocessorProtocol,R> Call<T,R> forMethod(
- final Class<T> protocol, final String method, final Object... args)
- throws NoSuchMethodException {
- Class[] types = new Class[args.length];
- for (int i=0; i<args.length; i++) {
- if (args[i] == null) {
- throw new NullPointerException("Method argument cannot be null");
- }
- types[i] = args[i].getClass();
- }
-
- Method m = MethodUtils.getMatchingAccessibleMethod(protocol, method, types);
- if (m == null) {
- throw new NoSuchMethodException("No matching method found for '" +
- method + "'");
- }
-
- m.setAccessible(true);
- return forMethod(m, args);
- }
-
- /**
- * Creates a new {@link Batch.Call} instance that invokes a method
- * with the given parameters and returns the result.
- *
- * @param method the method reference to invoke
- * @param args zero or more arguments to be passed to the method
- * @param <T> the class type of the protocol implementation being invoked
- * @param <R> the return type for the method call
- * @return a {@code Callable} instance that will invoke the given method and
- * return the results
- * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
- */
- @Deprecated
- public static <T extends CoprocessorProtocol,R> Call<T,R> forMethod(
- final Method method, final Object... args) {
- return new Call<T,R>() {
- public R call(T instance) throws IOException {
- try {
- if (Proxy.isProxyClass(instance.getClass())) {
- InvocationHandler invoker = Proxy.getInvocationHandler(instance);
- return (R)invoker.invoke(instance, method, args);
- } else {
- LOG.warn("Non proxied invocation of method '"+method.getName()+"'!");
- return (R)method.invoke(instance, args);
- }
- }
- catch (IllegalAccessException iae) {
- throw new IOException("Unable to invoke method '"+
- method.getName()+"'", iae);
- }
- catch (InvocationTargetException ite) {
- throw new IOException(ite.toString(), ite);
- }
- catch (Throwable t) {
- throw new IOException(t.toString(), t);
- }
- }
- };
- }
-
/**
* Defines a unit of work to be executed.
*
@@ -168,4 +71,4 @@ public abstract class Batch {
public static interface Callback<R> {
public void update(byte[] region, byte[] row, R result);
}
-}
+}
\ No newline at end of file
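The removed forMethod() helpers reduced to a naive reflective lookup, which is why null arguments were forbidden: the parameter types were taken from the arguments themselves. The core of that lookup, as a self-contained sketch with illustrative names:

    static Method findMethod(Class<?> protocol, String name, Object... args)
        throws NoSuchMethodException {
      Class<?>[] types = new Class<?>[args.length];
      for (int i = 0; i < args.length; i++) {
        if (args[i] == null) {
          throw new NullPointerException("Argument " + i + " must not be null");
        }
        types[i] = args[i].getClass();  // a null arg has no class to match on
      }
      return protocol.getMethod(name, types);
    }

Under the protobuf model there is nothing to look up reflectively; callers write an explicit Batch.Call against the generated stub instead.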
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java
deleted file mode 100644
index 72c3395fd9a..00000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/Exec.java
+++ /dev/null
@@ -1,133 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.client.coprocessor;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.client.Row;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
-import org.apache.hadoop.hbase.ipc.Invocation;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Classes;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.lang.reflect.Method;
-
-/**
- * Represents an arbitrary method invocation against a Coprocessor
- * instance. In order for a coprocessor implementation to be remotely callable
- * by clients, it must define and implement a {@link CoprocessorProtocol}
- * subclass. Only methods defined in the {@code CoprocessorProtocol} interface
- * will be callable by clients.
- *
- * <p>
- * This class is used internally by
- * {@link org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
- * to wrap the {@code CoprocessorProtocol} method invocations requested in
- * RPC calls. It should not be used directly by HBase clients.
- * </p>
- *
- * @see ExecResult
- * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
- * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
- * @deprecated since 0.96.0. See {@link org.apache.hadoop.hbase.client.HTable#coprocessorService(byte[])}
- * or related methods instead.
- */
-@Deprecated
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public class Exec extends Invocation implements Row {
- /** Row key used as a reference for any region lookups */
- private byte[] referenceRow;
- private Class<? extends CoprocessorProtocol> protocol;
- private String protocolName;
-
- public Exec() {
- }
-
- public Exec(Configuration configuration,
- byte[] row,
- Class<? extends CoprocessorProtocol> protocol,
- Method method, Object[] parameters) {
- super(method, parameters);
- this.conf = configuration;
- this.referenceRow = row;
- this.protocol = protocol;
- this.protocolName = protocol.getName();
- }
-
- public String getProtocolName() {
- return protocolName;
- }
-
- public Class<? extends CoprocessorProtocol> getProtocol() {
- return protocol;
- }
-
- public byte[] getRow() {
- return referenceRow;
- }
-
- public int compareTo(Row row) {
- return Bytes.compareTo(referenceRow, row.getRow());
- }
-
- @Override
- public void write(DataOutput out) throws IOException {
- // fields for Invocation
- out.writeUTF(this.methodName);
- out.writeInt(parameterClasses.length);
- for (int i = 0; i < parameterClasses.length; i++) {
- HbaseObjectWritable.writeObject(out, parameters[i],
- parameters[i] != null ? parameters[i].getClass() : parameterClasses[i],
- conf);
- out.writeUTF(parameterClasses[i].getName());
- }
- // fields for Exec
- Bytes.writeByteArray(out, referenceRow);
- out.writeUTF(protocol.getName());
- }
-
- @Override
- public void readFields(DataInput in) throws IOException {
- // fields for Invocation
- methodName = in.readUTF();
- parameters = new Object[in.readInt()];
- parameterClasses = new Class[parameters.length];
- HbaseObjectWritable objectWritable = new HbaseObjectWritable();
- for (int i = 0; i < parameters.length; i++) {
- parameters[i] = HbaseObjectWritable.readObject(in, objectWritable,
- this.conf);
- String parameterClassName = in.readUTF();
- try {
- parameterClasses[i] = Classes.extendedForName(parameterClassName);
- } catch (ClassNotFoundException e) {
- throw new IOException("Couldn't find class: " + parameterClassName);
- }
- }
- // fields for Exec
- referenceRow = Bytes.readByteArray(in);
- protocolName = in.readUTF();
- }
-}
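Exec's hand-rolled Writable encoding of {row, protocol, method, parameters} is replaced on the wire by the CoprocessorServiceCall message already imported in the ProtobufUtil hunk below. Its construction looks roughly like this (builder chain assumed from the message's fields; service, method, and request stand for a generated Service instance, a method descriptor, and a request message):

    CoprocessorServiceCall call = CoprocessorServiceCall.newBuilder()
        .setRow(ByteString.copyFrom(row))
        .setServiceName(service.getDescriptorForType().getFullName())
        .setMethodName(method.getName())
        .setRequest(request.toByteString())
        .build();

Note the contrast with Exec.write(): no per-parameter class names and no HbaseObjectWritable; the request payload is one self-describing protobuf blob.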
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java
deleted file mode 100644
index 687eabef3c5..00000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/coprocessor/ExecResult.java
+++ /dev/null
@@ -1,87 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.client.coprocessor;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.io.HbaseObjectWritable;
-import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.Classes;
-import org.apache.hadoop.io.Writable;
-
-import java.io.DataInput;
-import java.io.DataOutput;
-import java.io.IOException;
-import java.io.Serializable;
-
-/**
- * Represents the return value from a
- * {@link org.apache.hadoop.hbase.client.coprocessor.Exec} invocation.
- * This simply wraps the value for easier
- * {@link org.apache.hadoop.hbase.io.HbaseObjectWritable}
- * serialization.
- *
- * <p>
- * This class is used internally by the HBase client code to properly serialize
- * responses from {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol}
- * method invocations. It should not be used directly by clients.
- * </p>
- *
- * @see Exec
- * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
- * @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
- * @deprecated since 0.96.0. See {@link org.apache.hadoop.hbase.client.HTable#coprocessorService(byte[])}
- * or related methods instead.
- */
-@Deprecated
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public class ExecResult implements Writable {
- private byte[] regionName;
- private Object value;
-
- public ExecResult() {
- }
-
- public ExecResult(byte[] region, Object value) {
- this.regionName = region;
- this.value = value;
- }
-
- public byte[] getRegionName() {
- return regionName;
- }
-
- public Object getValue() {
- return value;
- }
-
- @Override
- public void write(DataOutput out) throws IOException {
- Bytes.writeByteArray(out, regionName);
- HbaseObjectWritable.writeObject(out, value,
- value != null ? value.getClass() : Writable.class, null);
- }
-
- @Override
- public void readFields(DataInput in) throws IOException {
- regionName = Bytes.readByteArray(in);
- value = HbaseObjectWritable.readObject(in, null);
- }
-}
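ExecResult existed only to give an arbitrary return value a Writable round-trip; that round-trip itself is the standard Hadoop pattern, sketched generically:

    static byte[] toBytes(Writable w) throws IOException {
      ByteArrayOutputStream baos = new ByteArrayOutputStream();
      w.write(new DataOutputStream(baos));
      return baos.toByteArray();
    }

    static void fromBytes(Writable w, byte[] data) throws IOException {
      w.readFields(new DataInputStream(new ByteArrayInputStream(data)));
    }

In the protobuf model the region name and value travel as fields of CoprocessorServiceResponse, so no wrapper class is needed.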
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java
deleted file mode 100644
index ccc32b522c8..00000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseEndpointCoprocessor.java
+++ /dev/null
@@ -1,81 +0,0 @@
-/*
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.hbase.coprocessor;
-
-import java.io.IOException;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.hbase.Coprocessor;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
-import org.apache.hadoop.hbase.ipc.ProtocolSignature;
-import org.apache.hadoop.hbase.ipc.VersionedProtocol;
-
-/**
- * This abstract class provides default implementation of an Endpoint.
- * It also maintains a CoprocessorEnvironment object which can be
- * used to access region resources.
- *
- * It's recommended to use this abstract class to implement your Endpoint.
- * However, you can still just implement the CoprocessorProtocol and
- * Coprocessor interfaces to develop an Endpoint, but then you won't be able
- * to access region-related resources, i.e., the CoprocessorEnvironment.
- * @deprecated CoprocessorProtocol is going away in 0.96
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-public abstract class BaseEndpointCoprocessor implements Coprocessor,
- CoprocessorProtocol, VersionedProtocol {
- /**
- * This Interfaces' version. Version changes when the Interface changes.
- */
- // All HBase Interfaces used derive from HBaseRPCProtocolVersion. It
- // maintained a single global version number on all HBase Interfaces. This
- // meant all HBase RPC was broken even though only one of the three RPC
- // Interfaces had changed. This has since been undone.
- public static final long VERSION = 28L;
-
- private CoprocessorEnvironment env;
-
- /**
- * @return env Coprocessor environment.
- */
- public CoprocessorEnvironment getEnvironment() {
- return env;
- }
-
- @Override
- public void start(CoprocessorEnvironment env) {
- this.env = env;
- }
-
- @Override
- public void stop(CoprocessorEnvironment env) { }
-
- @Override
- public ProtocolSignature getProtocolSignature(
- String protocol, long version, int clientMethodsHashCode)
- throws IOException {
- return new ProtocolSignature(VERSION, null);
- }
-
- @Override
- public long getProtocolVersion(String protocol, long clientVersion)
- throws IOException {
- return VERSION;
- }
-}
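The replacement for extending BaseEndpointCoprocessor is the CoprocessorService pattern: the endpoint extends its generated service class and returns itself from getService(), and the environment arrives through start(). A sketch, with RowCountService again standing in for a generated class (its abstract RPC methods are omitted here):

    public class RowCountEndpoint extends RowCountService
        implements Coprocessor, CoprocessorService {
      private RegionCoprocessorEnvironment env;

      @Override
      public Service getService() {
        return this;
      }

      @Override
      public void start(CoprocessorEnvironment env) throws IOException {
        if (env instanceof RegionCoprocessorEnvironment) {
          this.env = (RegionCoprocessorEnvironment) env;  // replaces getEnvironment()
        } else {
          throw new CoprocessorException("Must be loaded on a table region!");
        }
      }

      @Override
      public void stop(CoprocessorEnvironment env) throws IOException {
        // nothing to clean up
      }
    }

The VERSION constant and the VersionedProtocol plumbing disappear entirely; protobuf service descriptors carry their own identity.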
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
index 2a3a69e7d04..671dfd63cfa 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/CoprocessorHost.java
@@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.SortedCopyOnWriteSet;
@@ -586,29 +585,6 @@ public abstract class CoprocessorHost {
return table.get(gets);
}
- @Override
- @Deprecated
- public void coprocessorExec(Class protocol,
- byte[] startKey, byte[] endKey, Batch.Call callable,
- Batch.Callback callback) throws IOException, Throwable {
- table.coprocessorExec(protocol, startKey, endKey, callable, callback);
- }
-
- @Override
- @Deprecated
- public Map coprocessorExec(
- Class protocol, byte[] startKey, byte[] endKey, Batch.Call callable)
- throws IOException, Throwable {
- return table.coprocessorExec(protocol, startKey, endKey, callable);
- }
-
- @Override
- @Deprecated
- public T coprocessorProxy(Class protocol,
- byte[] row) {
- return table.coprocessorProxy(protocol, row);
- }
-
@Override
public CoprocessorRpcChannel coprocessorService(byte[] row) {
return table.coprocessorService(row);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
index 35e5cd57529..fdaebbc4095 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HbaseObjectWritable.java
@@ -60,7 +60,6 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.coprocessor.Exec;
import org.apache.hadoop.hbase.filter.BinaryComparator;
import org.apache.hadoop.hbase.filter.BitComparator;
import org.apache.hadoop.hbase.filter.ColumnCountGetFilter;
@@ -238,7 +237,8 @@ public class HbaseObjectWritable implements Writable, WritableWithSize, Configur
addToMap(MultiResponse.class, code++);
// coprocessor execution
- addToMap(Exec.class, code++);
+ // Exec was removed; consume its code so later type codes keep their values.
+ code++;
addToMap(Increment.class, code++);
addToMap(KeyOnlyFilter.class, code++);
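The bare code++ above is deliberate: addToMap() assigns sequential integer codes that end up inside serialized data, so compacting the sequence after removing Exec would silently re-number every later class. A toy illustration of the invariant, with stand-in classes:

    Map<Integer, Class<?>> codeToClass = new HashMap<Integer, Class<?>>();
    int code = 40;                          // arbitrary start for the sketch
    codeToClass.put(code++, String.class);  // stands in for MultiResponse
    code++;                                 // burned slot where Exec used to live
    codeToClass.put(code++, Long.class);    // stands in for Increment; code unchanged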
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java
deleted file mode 100644
index fa95ec26378..00000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/CoprocessorProtocol.java
+++ /dev/null
@@ -1,46 +0,0 @@
-/**
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.ipc;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-
-/**
- * All custom RPC protocols to be exported by Coprocessors must extend this interface.
- *
- * <p>
- * Note that all callable methods must have a return type handled by
- * {@link org.apache.hadoop.hbase.io.HbaseObjectWritable#writeObject(java.io.DataOutput, Object, Class, org.apache.hadoop.conf.Configuration)}.
- * That is:
- * <ul>
- * <li>a Java primitive type ({@code int}, {@code float}, etc)</li>
- * <li>a Java {@code String}</li>
- * <li>a {@link org.apache.hadoop.io.Writable}</li>
- * <li>an array or {@code java.util.List} of one of the above</li>
- * </ul>
- * </p>
- *
- * @deprecated since 0.96. Use {@link org.apache.hadoop.hbase.coprocessor.CoprocessorService}
- * instead.
- */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
-@Deprecated
-public interface CoprocessorProtocol extends VersionedProtocol {
- public static final long VERSION = 1L;
-}
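For reference, a legacy endpoint protocol was nothing more than an interface extending CoprocessorProtocol, with return types restricted as the javadoc above lists. An illustrative, now-obsolete example:

    public interface ColumnAggregationProtocol extends CoprocessorProtocol {
      // long is a Java primitive, so HbaseObjectWritable could carry it
      long sum(byte[] family, byte[] qualifier) throws IOException;
    }

Its protobuf successor is a .proto service definition compiled into a stub, which removes the HbaseObjectWritable restriction altogether.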
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java
deleted file mode 100644
index decd9bf9f96..00000000000
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/ExecRPCInvoker.java
+++ /dev/null
@@ -1,98 +0,0 @@
-/*
- *
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.ipc;
-
-import java.lang.reflect.InvocationHandler;
-import java.lang.reflect.Method;
-
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.client.HConnection;
-import org.apache.hadoop.hbase.client.ServerCallable;
-import org.apache.hadoop.hbase.client.coprocessor.Exec;
-import org.apache.hadoop.hbase.client.coprocessor.ExecResult;
-import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.util.Bytes;
-
-/**
- * Backs a {@link CoprocessorProtocol} subclass proxy and forwards method
- * invocations for server execution. Note that internally this will issue a
- * separate RPC call for each method invocation (using a
- * {@link org.apache.hadoop.hbase.client.ServerCallable} instance).
- */
-@InterfaceAudience.Private
-@Deprecated
-public class ExecRPCInvoker implements InvocationHandler {
- // LOG is NOT in hbase subpackage intentionally so that the default HBase
- // DEBUG log level does NOT emit RPC-level logging.
- private static final Log LOG = LogFactory.getLog("org.apache.hadoop.ipc.ExecRPCInvoker");
-
- private Configuration conf;
- private final HConnection connection;
- private Class<? extends CoprocessorProtocol> protocol;
- private final byte[] table;
- private final byte[] row;
- private byte[] regionName;
-
- public ExecRPCInvoker(Configuration conf,
- HConnection connection,
- Class<? extends CoprocessorProtocol> protocol,
- byte[] table,
- byte[] row) {
- this.conf = conf;
- this.connection = connection;
- this.protocol = protocol;
- this.table = table;
- this.row = row;
- }
-
- @Override
- public Object invoke(Object instance, final Method method, final Object[] args)
- throws Throwable {
- if (LOG.isDebugEnabled()) {
- LOG.debug("Call: "+method.getName()+", "+(args != null ? args.length : 0));
- }
-
- if (row != null) {
- final Exec exec = new Exec(conf, row, protocol, method, args);
- ServerCallable<ExecResult> callable =
- new ServerCallable<ExecResult>(connection, table, row) {
- public ExecResult call() throws Exception {
- byte[] regionName = location.getRegionInfo().getRegionName();
- return ProtobufUtil.execCoprocessor(server, exec, regionName);
- }
- };
- ExecResult result = callable.withRetries();
- this.regionName = result.getRegionName();
- LOG.debug("Result is region="+ Bytes.toStringBinary(regionName) +
- ", value="+result.getValue());
- return result.getValue();
- } else if (LOG.isDebugEnabled()) {
- LOG.debug("Null row passed for call");
- }
-
- return null;
- }
-
- public byte[] getRegionName() {
- return regionName;
- }
-}
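Mechanically, ExecRPCInvoker was a textbook java.lang.reflect dynamic proxy: every call on the protocol interface funnels through invoke(), which shipped the Method plus arguments as one RPC. The mechanism in isolation, self-contained and runnable:

    interface Greeter {                  // stand-in for a CoprocessorProtocol subclass
      String greet(String name);
    }

    Greeter proxy = (Greeter) Proxy.newProxyInstance(
        Greeter.class.getClassLoader(),
        new Class<?>[]{ Greeter.class },
        new InvocationHandler() {
          @Override
          public Object invoke(Object p, Method method, Object[] args) {
            // a real invoker serialized method + args into an Exec RPC here
            return "remote:" + method.getName() + ":" + args[0];
          }
        });
    System.out.println(proxy.greet("hbase"));  // prints remote:greet:hbase

The protobuf replacement (RegionCoprocessorRpcChannel, kept in the imports above) drops the proxy entirely: generated stubs already know how to marshal their own calls.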
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
index 3d3be4b83ad..01d84281118 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java
@@ -31,7 +31,6 @@ import java.lang.reflect.Method;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
-import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
@@ -39,11 +38,8 @@ import java.util.NavigableMap;
import java.util.NavigableSet;
import java.util.TreeMap;
-import com.google.protobuf.InvalidProtocolBufferException;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DeserializationException;
import org.apache.hadoop.hbase.DoNotRetryIOException;
-import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
@@ -66,14 +62,11 @@ import org.apache.hadoop.hbase.client.Row;
import org.apache.hadoop.hbase.client.RowLock;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Scan;
-import org.apache.hadoop.hbase.client.coprocessor.Exec;
-import org.apache.hadoop.hbase.client.coprocessor.ExecResult;
import org.apache.hadoop.hbase.client.metrics.ScanMetrics;
import org.apache.hadoop.hbase.filter.ByteArrayComparable;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.HbaseObjectWritable;
import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.ipc.CoprocessorProtocol;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.AccessControlService;
import org.apache.hadoop.hbase.protobuf.generated.AdminProtos.CloseRegionRequest;
@@ -102,8 +95,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Column;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceCall;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.CoprocessorServiceResponse;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorRequest;
-import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.ExecCoprocessorResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetRequest;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.GetResponse;
import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.MultiRequest;
@@ -115,7 +106,6 @@ import org.apache.hadoop.hbase.protobuf.generated.ClientProtos.Mutate.MutateType
import org.apache.hadoop.hbase.protobuf.generated.ComparatorProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameBytesPair;
-import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.NameStringPair;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionInfo;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionLoad;
import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.RegionSpecifier;
@@ -133,23 +123,23 @@ import org.apache.hadoop.hbase.security.token.AuthenticationTokenIdentifier;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Methods;
import org.apache.hadoop.hbase.util.Pair;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.token.Token;
import org.apache.hbase.Cell;
import com.google.common.collect.ArrayListMultimap;
import com.google.common.collect.ListMultimap;
import com.google.common.collect.Lists;
import com.google.protobuf.ByteString;
+import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import com.google.protobuf.RpcChannel;
import com.google.protobuf.Service;
import com.google.protobuf.ServiceException;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.security.token.Token;
/**
* Protobufs utility.
*/
-@SuppressWarnings("deprecation")
public final class ProtobufUtil {
private ProtobufUtil() {
@@ -234,53 +224,6 @@ public final class ProtobufUtil {
return e instanceof IOException ? (IOException) e : new IOException(se);
}
- /**
- * Convert a protocol buffer Exec to a client Exec
- *
- * @param proto the protocol buffer Exec to convert
- * @return the converted client Exec
- */
- @SuppressWarnings("unchecked")
- @Deprecated
- public static Exec toExec(
- final ClientProtos.Exec proto) throws IOException {
- byte[] row = proto.getRow().toByteArray();
- String protocolName = proto.getProtocolName();
- String methodName = proto.getMethodName();
- List